From 6a4031a38329c30de87041e526d4b7fb3742aaa6 Mon Sep 17 00:00:00 2001 From: Alberto Geniola Date: Wed, 4 Dec 2024 23:59:40 +0100 Subject: [PATCH 001/677] Bump elmax-api to 0.0.6.3 (#131876) --- homeassistant/components/elmax/common.py | 2 +- homeassistant/components/elmax/config_flow.py | 2 +- homeassistant/components/elmax/cover.py | 4 ++-- homeassistant/components/elmax/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/elmax/conftest.py | 17 +++++++++++++++-- 7 files changed, 22 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/elmax/common.py b/homeassistant/components/elmax/common.py index 88e61e36a68..18350e45efe 100644 --- a/homeassistant/components/elmax/common.py +++ b/homeassistant/components/elmax/common.py @@ -35,7 +35,7 @@ def check_local_version_supported(api_version: str | None) -> bool: class DirectPanel(PanelEntry): """Helper class for wrapping a directly accessed Elmax Panel.""" - def __init__(self, panel_uri): + def __init__(self, panel_uri) -> None: """Construct the object.""" super().__init__(panel_uri, True, {}) diff --git a/homeassistant/components/elmax/config_flow.py b/homeassistant/components/elmax/config_flow.py index bf479e997ef..3bb01efd3d5 100644 --- a/homeassistant/components/elmax/config_flow.py +++ b/homeassistant/components/elmax/config_flow.py @@ -203,7 +203,7 @@ class ElmaxConfigFlow(ConfigFlow, domain=DOMAIN): async def async_step_direct(self, user_input: dict[str, Any]) -> ConfigFlowResult: """Handle the direct setup step.""" - self._selected_mode = CONF_ELMAX_MODE_CLOUD + self._selected_mode = CONF_ELMAX_MODE_DIRECT if user_input is None: return self.async_show_form( step_id=CONF_ELMAX_MODE_DIRECT, diff --git a/homeassistant/components/elmax/cover.py b/homeassistant/components/elmax/cover.py index a53c28c5f33..403bc51dbff 100644 --- a/homeassistant/components/elmax/cover.py +++ b/homeassistant/components/elmax/cover.py @@ -121,13 +121,13 @@ class 
ElmaxCover(ElmaxEntity, CoverEntity): else: _LOGGER.debug("Ignoring stop request as the cover is IDLE") - async def async_open_cover(self, **kwargs): + async def async_open_cover(self, **kwargs: Any) -> None: """Open the cover.""" await self.coordinator.http_client.execute_command( endpoint_id=self._device.endpoint_id, command=CoverCommand.UP ) - async def async_close_cover(self, **kwargs): + async def async_close_cover(self, **kwargs: Any) -> None: """Close the cover.""" await self.coordinator.http_client.execute_command( endpoint_id=self._device.endpoint_id, command=CoverCommand.DOWN diff --git a/homeassistant/components/elmax/manifest.json b/homeassistant/components/elmax/manifest.json index efa97a9f6b9..dfa20326d0c 100644 --- a/homeassistant/components/elmax/manifest.json +++ b/homeassistant/components/elmax/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/elmax", "iot_class": "cloud_polling", "loggers": ["elmax_api"], - "requirements": ["elmax-api==0.0.6.1"], + "requirements": ["elmax-api==0.0.6.3"], "zeroconf": [ { "type": "_elmax-ssl._tcp.local." 
diff --git a/requirements_all.txt b/requirements_all.txt index 20f105b7f07..9cca8da6fc8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -824,7 +824,7 @@ eliqonline==1.2.2 elkm1-lib==2.2.10 # homeassistant.components.elmax -elmax-api==0.0.6.1 +elmax-api==0.0.6.3 # homeassistant.components.elvia elvia==0.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 38440ddcf52..dec62458540 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -699,7 +699,7 @@ elgato==5.1.2 elkm1-lib==2.2.10 # homeassistant.components.elmax -elmax-api==0.0.6.1 +elmax-api==0.0.6.3 # homeassistant.components.elvia elvia==0.1.0 diff --git a/tests/components/elmax/conftest.py b/tests/components/elmax/conftest.py index f92fc2f1827..f8cf33ffe1a 100644 --- a/tests/components/elmax/conftest.py +++ b/tests/components/elmax/conftest.py @@ -1,6 +1,7 @@ """Configuration for Elmax tests.""" from collections.abc import Generator +from datetime import datetime, timedelta import json from unittest.mock import AsyncMock, patch @@ -11,6 +12,7 @@ from elmax_api.constants import ( ENDPOINT_LOGIN, ) from httpx import Response +import jwt import pytest import respx @@ -64,9 +66,20 @@ def httpx_mock_direct_fixture() -> Generator[respx.MockRouter]: ) as respx_mock: # Mock Login POST. 
login_route = respx_mock.post(f"/api/v2/{ENDPOINT_LOGIN}", name="login") - login_route.return_value = Response( - 200, json=json.loads(load_fixture("direct/login.json", "elmax")) + + login_json = json.loads(load_fixture("direct/login.json", "elmax")) + decoded_jwt = jwt.decode_complete( + login_json["token"].split(" ")[1], + algorithms="HS256", + options={"verify_signature": False}, ) + expiration = datetime.now() + timedelta(hours=1) + decoded_jwt["payload"]["exp"] = int(expiration.timestamp()) + jws_string = jwt.encode( + payload=decoded_jwt["payload"], algorithm="HS256", key="" + ) + login_json["token"] = f"JWT {jws_string}" + login_route.return_value = Response(200, json=login_json) # Mock Device list GET. list_devices_route = respx_mock.get( From cf6d33635b2c1cb929cb4a3f4015a7e0c5107153 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 5 Dec 2024 20:52:48 -0600 Subject: [PATCH 002/677] Fix deprecated call to mimetypes.guess_type in CachingStaticResource (#132299) --- homeassistant/components/http/static.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/http/static.py b/homeassistant/components/http/static.py index 29c5840a4bf..9ca34af3741 100644 --- a/homeassistant/components/http/static.py +++ b/homeassistant/components/http/static.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections.abc import Mapping from pathlib import Path +import sys from typing import Final from aiohttp.hdrs import CACHE_CONTROL, CONTENT_TYPE @@ -17,6 +18,15 @@ CACHE_HEADER = f"public, max-age={CACHE_TIME}" CACHE_HEADERS: Mapping[str, str] = {CACHE_CONTROL: CACHE_HEADER} RESPONSE_CACHE: LRU[tuple[str, Path], tuple[Path, str]] = LRU(512) +if sys.version_info >= (3, 13): + # guess_type is soft-deprecated in 3.13 + # for paths and should only be used for + # URLs. guess_file_type should be used + # for paths instead. 
+ _GUESSER = CONTENT_TYPES.guess_file_type +else: + _GUESSER = CONTENT_TYPES.guess_type + class CachingStaticResource(StaticResource): """Static Resource handler that will add cache headers.""" @@ -37,9 +47,7 @@ class CachingStaticResource(StaticResource): # Must be directory index; ignore caching return response file_path = response._path # noqa: SLF001 - response.content_type = ( - CONTENT_TYPES.guess_type(file_path)[0] or FALLBACK_CONTENT_TYPE - ) + response.content_type = _GUESSER(file_path)[0] or FALLBACK_CONTENT_TYPE # Cache actual header after setter construction. content_type = response.headers[CONTENT_TYPE] RESPONSE_CACHE[key] = (file_path, content_type) From a47e5398f03fe49b92d639a0733bab9fb4a5ce69 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Fri, 6 Dec 2024 08:04:02 +1000 Subject: [PATCH 003/677] Bump tesla-fleet-api to 0.8.5 (#132339) --- homeassistant/components/tesla_fleet/const.py | 1 + homeassistant/components/tesla_fleet/manifest.json | 2 +- homeassistant/components/teslemetry/manifest.json | 2 +- homeassistant/components/tessie/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/tesla_fleet/snapshots/test_diagnostics.ambr | 1 + 7 files changed, 7 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/tesla_fleet/const.py b/homeassistant/components/tesla_fleet/const.py index 53e34092326..c70cc3291f7 100644 --- a/homeassistant/components/tesla_fleet/const.py +++ b/homeassistant/components/tesla_fleet/const.py @@ -21,6 +21,7 @@ SCOPES = [ Scope.OPENID, Scope.OFFLINE_ACCESS, Scope.VEHICLE_DEVICE_DATA, + Scope.VEHICLE_LOCATION, Scope.VEHICLE_CMDS, Scope.VEHICLE_CHARGING_CMDS, Scope.ENERGY_DEVICE_DATA, diff --git a/homeassistant/components/tesla_fleet/manifest.json b/homeassistant/components/tesla_fleet/manifest.json index f27929032d7..95062a8f856 100644 --- a/homeassistant/components/tesla_fleet/manifest.json +++ b/homeassistant/components/tesla_fleet/manifest.json @@ -7,5 +7,5 @@ 
"documentation": "https://www.home-assistant.io/integrations/tesla_fleet", "iot_class": "cloud_polling", "loggers": ["tesla-fleet-api"], - "requirements": ["tesla-fleet-api==0.8.4"] + "requirements": ["tesla-fleet-api==0.8.5"] } diff --git a/homeassistant/components/teslemetry/manifest.json b/homeassistant/components/teslemetry/manifest.json index fc82dea6445..3736d76bf36 100644 --- a/homeassistant/components/teslemetry/manifest.json +++ b/homeassistant/components/teslemetry/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/teslemetry", "iot_class": "cloud_polling", "loggers": ["tesla-fleet-api"], - "requirements": ["tesla-fleet-api==0.8.4", "teslemetry-stream==0.4.2"] + "requirements": ["tesla-fleet-api==0.8.5", "teslemetry-stream==0.4.2"] } diff --git a/homeassistant/components/tessie/manifest.json b/homeassistant/components/tessie/manifest.json index cab9f4c706d..2b8ae924fe3 100644 --- a/homeassistant/components/tessie/manifest.json +++ b/homeassistant/components/tessie/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/tessie", "iot_class": "cloud_polling", "loggers": ["tessie", "tesla-fleet-api"], - "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.8.4"] + "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.8.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index 9cca8da6fc8..cd9bfa1cb61 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2810,7 +2810,7 @@ temperusb==1.6.1 # homeassistant.components.tesla_fleet # homeassistant.components.teslemetry # homeassistant.components.tessie -tesla-fleet-api==0.8.4 +tesla-fleet-api==0.8.5 # homeassistant.components.powerwall tesla-powerwall==0.5.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index dec62458540..23cc8c338e4 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2238,7 +2238,7 @@ temperusb==1.6.1 # homeassistant.components.tesla_fleet # 
homeassistant.components.teslemetry # homeassistant.components.tessie -tesla-fleet-api==0.8.4 +tesla-fleet-api==0.8.5 # homeassistant.components.powerwall tesla-powerwall==0.5.2 diff --git a/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr b/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr index eb8c57910a4..cdb24b1d2b5 100644 --- a/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr +++ b/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr @@ -165,6 +165,7 @@ 'openid', 'offline_access', 'vehicle_device_data', + 'vehicle_location', 'vehicle_cmds', 'vehicle_charging_cmds', 'energy_device_data', From 92392ab3d4e9a60a2de18a435c3a33319029f689 Mon Sep 17 00:00:00 2001 From: robinostlund Date: Thu, 5 Dec 2024 21:14:04 +0100 Subject: [PATCH 004/677] Add missing UnitOfPower to sensor (#132352) * Add missing UnitOfPower to sensor * Update homeassistant/components/sensor/const.py Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> * adding to number --------- Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/components/number/const.py | 8 +++++++- homeassistant/components/sensor/const.py | 8 +++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/number/const.py b/homeassistant/components/number/const.py index 7330b781e75..e182d015101 100644 --- a/homeassistant/components/number/const.py +++ b/homeassistant/components/number/const.py @@ -480,7 +480,13 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = { NumberDeviceClass.PM10: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER}, NumberDeviceClass.PM25: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER}, NumberDeviceClass.POWER_FACTOR: {PERCENTAGE, None}, - NumberDeviceClass.POWER: {UnitOfPower.WATT, UnitOfPower.KILO_WATT}, + NumberDeviceClass.POWER: { + UnitOfPower.WATT, + UnitOfPower.KILO_WATT, + UnitOfPower.MEGA_WATT, + UnitOfPower.GIGA_WATT, + UnitOfPower.TERA_WATT, + }, 
NumberDeviceClass.PRECIPITATION: set(UnitOfPrecipitationDepth), NumberDeviceClass.PRECIPITATION_INTENSITY: set(UnitOfVolumetricFlux), NumberDeviceClass.PRESSURE: set(UnitOfPressure), diff --git a/homeassistant/components/sensor/const.py b/homeassistant/components/sensor/const.py index 87012c3631a..1700c7c6ca9 100644 --- a/homeassistant/components/sensor/const.py +++ b/homeassistant/components/sensor/const.py @@ -579,7 +579,13 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = { SensorDeviceClass.PM10: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER}, SensorDeviceClass.PM25: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER}, SensorDeviceClass.POWER_FACTOR: {PERCENTAGE, None}, - SensorDeviceClass.POWER: {UnitOfPower.WATT, UnitOfPower.KILO_WATT}, + SensorDeviceClass.POWER: { + UnitOfPower.WATT, + UnitOfPower.KILO_WATT, + UnitOfPower.MEGA_WATT, + UnitOfPower.GIGA_WATT, + UnitOfPower.TERA_WATT, + }, SensorDeviceClass.PRECIPITATION: set(UnitOfPrecipitationDepth), SensorDeviceClass.PRECIPITATION_INTENSITY: set(UnitOfVolumetricFlux), SensorDeviceClass.PRESSURE: set(UnitOfPressure), From dad81927cbdfe576f95dfc46ae6963df931d5148 Mon Sep 17 00:00:00 2001 From: Diogo Gomes Date: Thu, 5 Dec 2024 17:45:04 +0000 Subject: [PATCH 005/677] Removes references to croniter from utility_meter (#132364) remove croniter --- homeassistant/components/utility_meter/__init__.py | 13 ++++++++----- .../components/utility_meter/manifest.json | 1 - 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/utility_meter/__init__.py b/homeassistant/components/utility_meter/__init__.py index c6a8635f831..aac31e085a0 100644 --- a/homeassistant/components/utility_meter/__init__.py +++ b/homeassistant/components/utility_meter/__init__.py @@ -1,9 +1,9 @@ """Support for tracking consumption over given periods of time.""" -from datetime import timedelta +from datetime import datetime, timedelta import logging -from croniter import croniter +from cronsim 
import CronSim, CronSimError import voluptuous as vol from homeassistant.components.select import DOMAIN as SELECT_DOMAIN @@ -47,9 +47,12 @@ DEFAULT_OFFSET = timedelta(hours=0) def validate_cron_pattern(pattern): """Check that the pattern is well-formed.""" - if croniter.is_valid(pattern): - return pattern - raise vol.Invalid("Invalid pattern") + try: + CronSim(pattern, datetime(2020, 1, 1)) # any date will do + except CronSimError as err: + _LOGGER.error("Invalid cron pattern %s: %s", pattern, err) + raise vol.Invalid("Invalid pattern") from err + return pattern def period_or_cron(config): diff --git a/homeassistant/components/utility_meter/manifest.json b/homeassistant/components/utility_meter/manifest.json index 31a2d4e9584..5167c51469d 100644 --- a/homeassistant/components/utility_meter/manifest.json +++ b/homeassistant/components/utility_meter/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/utility_meter", "integration_type": "helper", "iot_class": "local_push", - "loggers": ["croniter"], "quality_scale": "internal", "requirements": ["cronsim==2.6"] } From bf20ffae9622b0d3f45e01fb0bcaf8f8f0e436a8 Mon Sep 17 00:00:00 2001 From: Glenn Waters Date: Thu, 5 Dec 2024 22:32:33 -0500 Subject: [PATCH 006/677] Bump upb-lib to 0.5.9 (#132411) --- homeassistant/components/upb/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/upb/manifest.json b/homeassistant/components/upb/manifest.json index 6b49c859771..1e61747b3f1 100644 --- a/homeassistant/components/upb/manifest.json +++ b/homeassistant/components/upb/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/upb", "iot_class": "local_push", "loggers": ["upb_lib"], - "requirements": ["upb-lib==0.5.8"] + "requirements": ["upb-lib==0.5.9"] } diff --git a/requirements_all.txt b/requirements_all.txt index cd9bfa1cb61..d3322ff4d4d 
100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2915,7 +2915,7 @@ unifiled==0.11 universal-silabs-flasher==0.0.25 # homeassistant.components.upb -upb-lib==0.5.8 +upb-lib==0.5.9 # homeassistant.components.upcloud upcloud-api==2.6.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 23cc8c338e4..6e0b15b3802 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2322,7 +2322,7 @@ unifi-discovery==1.2.0 universal-silabs-flasher==0.0.25 # homeassistant.components.upb -upb-lib==0.5.8 +upb-lib==0.5.9 # homeassistant.components.upcloud upcloud-api==2.6.0 From 3f9f0f8ac29eebbd410bcd31e7466b0c53bc9ce7 Mon Sep 17 00:00:00 2001 From: Blake Bryant Date: Thu, 5 Dec 2024 23:28:02 -0800 Subject: [PATCH 007/677] Bump pydeako to 0.6.0 (#132432) feat: update deako integration to use improved version of pydeako Some things of note: - simplified errors - pydeako has introduced some connection improvements See here: https://github.com/DeakoLights/pydeako/releases/tag/0.6.0 --- homeassistant/components/deako/__init__.py | 11 ++----- homeassistant/components/deako/config_flow.py | 2 +- homeassistant/components/deako/light.py | 2 +- homeassistant/components/deako/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/deako/test_init.py | 31 ++----------------- 7 files changed, 11 insertions(+), 41 deletions(-) diff --git a/homeassistant/components/deako/__init__.py b/homeassistant/components/deako/__init__.py index fdcf09fad60..7a169defe01 100644 --- a/homeassistant/components/deako/__init__.py +++ b/homeassistant/components/deako/__init__.py @@ -4,8 +4,7 @@ from __future__ import annotations import logging -from pydeako.deako import Deako, DeviceListTimeout, FindDevicesTimeout -from pydeako.discover import DeakoDiscoverer +from pydeako import Deako, DeakoDiscoverer, FindDevicesError from homeassistant.components import zeroconf from homeassistant.config_entries import ConfigEntry @@ 
-30,12 +29,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: DeakoConfigEntry) -> boo await connection.connect() try: await connection.find_devices() - except DeviceListTimeout as exc: # device list never received - _LOGGER.warning("Device not responding to device list") - await connection.disconnect() - raise ConfigEntryNotReady(exc) from exc - except FindDevicesTimeout as exc: # total devices expected not received - _LOGGER.warning("Device not responding to device requests") + except FindDevicesError as exc: + _LOGGER.warning("Error finding devices: %s", exc) await connection.disconnect() raise ConfigEntryNotReady(exc) from exc diff --git a/homeassistant/components/deako/config_flow.py b/homeassistant/components/deako/config_flow.py index d0676fa81d9..273cbf2795e 100644 --- a/homeassistant/components/deako/config_flow.py +++ b/homeassistant/components/deako/config_flow.py @@ -1,6 +1,6 @@ """Config flow for deako.""" -from pydeako.discover import DeakoDiscoverer, DevicesNotFoundException +from pydeako import DeakoDiscoverer, DevicesNotFoundException from homeassistant.components import zeroconf from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/deako/light.py b/homeassistant/components/deako/light.py index c7ff8765402..75b01935c9a 100644 --- a/homeassistant/components/deako/light.py +++ b/homeassistant/components/deako/light.py @@ -2,7 +2,7 @@ from typing import Any -from pydeako.deako import Deako +from pydeako import Deako from homeassistant.components.light import ATTR_BRIGHTNESS, ColorMode, LightEntity from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/deako/manifest.json b/homeassistant/components/deako/manifest.json index e3099439b9d..f4f4782530b 100644 --- a/homeassistant/components/deako/manifest.json +++ b/homeassistant/components/deako/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/deako", "iot_class": "local_polling", "loggers": 
["pydeako"], - "requirements": ["pydeako==0.5.4"], + "requirements": ["pydeako==0.6.0"], "single_config_entry": true, "zeroconf": ["_deako._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index d3322ff4d4d..f2a3077cb68 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1841,7 +1841,7 @@ pydaikin==2.13.7 pydanfossair==0.1.0 # homeassistant.components.deako -pydeako==0.5.4 +pydeako==0.6.0 # homeassistant.components.deconz pydeconz==118 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6e0b15b3802..9090bfa3472 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1488,7 +1488,7 @@ pycsspeechtts==1.0.8 pydaikin==2.13.7 # homeassistant.components.deako -pydeako==0.5.4 +pydeako==0.6.0 # homeassistant.components.deconz pydeconz==118 diff --git a/tests/components/deako/test_init.py b/tests/components/deako/test_init.py index b4c0e8bb1f7..c2291330feb 100644 --- a/tests/components/deako/test_init.py +++ b/tests/components/deako/test_init.py @@ -2,7 +2,7 @@ from unittest.mock import MagicMock -from pydeako.deako import DeviceListTimeout, FindDevicesTimeout +from pydeako import FindDevicesError from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -37,7 +37,7 @@ async def test_deako_async_setup_entry( assert mock_config_entry.runtime_data == pydeako_deako_mock.return_value -async def test_deako_async_setup_entry_device_list_timeout( +async def test_deako_async_setup_entry_devices_error( hass: HomeAssistant, mock_config_entry: MockConfigEntry, pydeako_deako_mock: MagicMock, @@ -47,32 +47,7 @@ async def test_deako_async_setup_entry_device_list_timeout( mock_config_entry.add_to_hass(hass) - pydeako_deako_mock.return_value.find_devices.side_effect = DeviceListTimeout() - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - pydeako_deako_mock.assert_called_once_with( - 
pydeako_discoverer_mock.return_value.get_address - ) - pydeako_deako_mock.return_value.connect.assert_called_once() - pydeako_deako_mock.return_value.find_devices.assert_called_once() - pydeako_deako_mock.return_value.disconnect.assert_called_once() - - assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_deako_async_setup_entry_find_devices_timeout( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - pydeako_deako_mock: MagicMock, - pydeako_discoverer_mock: MagicMock, -) -> None: - """Test async_setup_entry raises ConfigEntryNotReady when pydeako raises FindDevicesTimeout.""" - - mock_config_entry.add_to_hass(hass) - - pydeako_deako_mock.return_value.find_devices.side_effect = FindDevicesTimeout() + pydeako_deako_mock.return_value.find_devices.side_effect = FindDevicesError() await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() From d919de6734812a6362e1dc8e21028ff776af2250 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 5 Dec 2024 19:50:02 -0600 Subject: [PATCH 008/677] Bump aiohttp to 3.11.10 (#132441) --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index ed7e995408f..0f94948b0bd 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -5,7 +5,7 @@ aiodiscover==2.1.0 aiodns==3.2.0 aiohasupervisor==0.2.1 aiohttp-fast-zlib==0.2.0 -aiohttp==3.11.9 +aiohttp==3.11.10 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 diff --git a/pyproject.toml b/pyproject.toml index 2ceb074cc48..30ebd72f469 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ dependencies = [ # change behavior based on presence of supervisor. 
Deprecated with #127228 # Lib can be removed with 2025.11 "aiohasupervisor==0.2.1", - "aiohttp==3.11.9", + "aiohttp==3.11.10", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.2.0", "aiozoneinfo==0.2.1", diff --git a/requirements.txt b/requirements.txt index 7aadd55c024..554d2de0aab 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,7 @@ # Home Assistant Core aiodns==3.2.0 aiohasupervisor==0.2.1 -aiohttp==3.11.9 +aiohttp==3.11.10 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.2.0 aiozoneinfo==0.2.1 From 1dfd4e80b9c444ed07f97cd4c5687035bd78b734 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 5 Dec 2024 21:23:24 -0600 Subject: [PATCH 009/677] Bump aioesphomeapi to 28.0.0 (#132447) --- homeassistant/components/esphome/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index 77a3164d94c..775ffbff4c8 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -16,7 +16,7 @@ "loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"], "mqtt": ["esphome/discover/#"], "requirements": [ - "aioesphomeapi==27.0.3", + "aioesphomeapi==28.0.0", "esphome-dashboard-api==1.2.3", "bleak-esphome==1.1.0" ], diff --git a/requirements_all.txt b/requirements_all.txt index f2a3077cb68..660752d9d24 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -243,7 +243,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==27.0.3 +aioesphomeapi==28.0.0 # homeassistant.components.flo aioflo==2021.11.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 9090bfa3472..acf19a45832 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -231,7 +231,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome 
-aioesphomeapi==27.0.3 +aioesphomeapi==28.0.0 # homeassistant.components.flo aioflo==2021.11.0 From d091936ac66e46c1ffc74ffb77a6b722da9550f1 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Fri, 6 Dec 2024 06:54:21 -0800 Subject: [PATCH 010/677] Update exception handling for python3.13 for getpass.getuser() (#132449) * Update exception handling for python3.13 for getpass.getuser() * Add comment Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> * Cleanup trailing space --------- Co-authored-by: Franck Nijhof Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/helpers/system_info.py | 5 ++++- tests/helpers/test_system_info.py | 9 ++++----- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/homeassistant/helpers/system_info.py b/homeassistant/helpers/system_info.py index df4c45cd5ed..53866428332 100644 --- a/homeassistant/helpers/system_info.py +++ b/homeassistant/helpers/system_info.py @@ -71,7 +71,10 @@ async def async_get_system_info(hass: HomeAssistant) -> dict[str, Any]: try: info_object["user"] = cached_get_user() - except KeyError: + except (KeyError, OSError): + # OSError on python >= 3.13, KeyError on python < 3.13 + # KeyError can be removed when 3.12 support is dropped + # see https://docs.python.org/3/whatsnew/3.13.html info_object["user"] = None if platform.system() == "Darwin": diff --git a/tests/helpers/test_system_info.py b/tests/helpers/test_system_info.py index 16b5b8b652b..2c4b95302fc 100644 --- a/tests/helpers/test_system_info.py +++ b/tests/helpers/test_system_info.py @@ -93,10 +93,9 @@ async def test_container_installationtype(hass: HomeAssistant) -> None: assert info["installation_type"] == "Unsupported Third Party Container" -async def test_getuser_keyerror(hass: HomeAssistant) -> None: - """Test getuser keyerror.""" - with patch( - "homeassistant.helpers.system_info.cached_get_user", side_effect=KeyError - ): +@pytest.mark.parametrize("error", [KeyError, OSError]) +async def 
test_getuser_oserror(hass: HomeAssistant, error: Exception) -> None: + """Test getuser oserror.""" + with patch("homeassistant.helpers.system_info.cached_get_user", side_effect=error): info = await async_get_system_info(hass) assert info["user"] is None From 56d10a0a7abfb136adf23d19aad5f857bc344329 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Joakim=20S=C3=B8rensen?= Date: Fri, 6 Dec 2024 08:20:06 +0100 Subject: [PATCH 011/677] Bump hass-nabucasa from 0.85.0 to 0.86.0 (#132456) Bump hass-nabucasa fro 0.85.0 to 0.86.0 --- homeassistant/components/cloud/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/cloud/manifest.json b/homeassistant/components/cloud/manifest.json index 60b105b401e..661edb67762 100644 --- a/homeassistant/components/cloud/manifest.json +++ b/homeassistant/components/cloud/manifest.json @@ -8,6 +8,6 @@ "integration_type": "system", "iot_class": "cloud_push", "loggers": ["hass_nabucasa"], - "requirements": ["hass-nabucasa==0.85.0"], + "requirements": ["hass-nabucasa==0.86.0"], "single_config_entry": true } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 0f94948b0bd..c416207d803 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ fnv-hash-fast==1.0.2 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 habluetooth==3.6.0 -hass-nabucasa==0.85.0 +hass-nabucasa==0.86.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 home-assistant-frontend==20241127.4 diff --git a/pyproject.toml b/pyproject.toml index 30ebd72f469..07aca0f1741 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,7 +45,7 @@ dependencies = [ "fnv-hash-fast==1.0.2", # hass-nabucasa is imported by helpers which don't depend on the cloud # integration - "hass-nabucasa==0.85.0", + 
"hass-nabucasa==0.86.0", # When bumping httpx, please check the version pins of # httpcore, anyio, and h11 in gen_requirements_all "httpx==0.27.2", diff --git a/requirements.txt b/requirements.txt index 554d2de0aab..ad3cff221f7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -19,7 +19,7 @@ bcrypt==4.2.0 certifi>=2021.5.30 ciso8601==2.3.1 fnv-hash-fast==1.0.2 -hass-nabucasa==0.85.0 +hass-nabucasa==0.86.0 httpx==0.27.2 home-assistant-bluetooth==1.13.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 660752d9d24..f44e141de94 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1090,7 +1090,7 @@ habitipy==0.3.3 habluetooth==3.6.0 # homeassistant.components.cloud -hass-nabucasa==0.85.0 +hass-nabucasa==0.86.0 # homeassistant.components.splunk hass-splunk==0.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index acf19a45832..96c379a77e2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -928,7 +928,7 @@ habitipy==0.3.3 habluetooth==3.6.0 # homeassistant.components.cloud -hass-nabucasa==0.85.0 +hass-nabucasa==0.86.0 # homeassistant.components.conversation hassil==2.0.5 From b1bc35f1c3646464e3a90a2b04d260a6681852eb Mon Sep 17 00:00:00 2001 From: G Johansson Date: Fri, 6 Dec 2024 08:33:05 +0100 Subject: [PATCH 012/677] Fix nordpool dont have previous or next price (#132457) --- homeassistant/components/nordpool/sensor.py | 23 ++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/nordpool/sensor.py b/homeassistant/components/nordpool/sensor.py index e7e655a6657..47617cc8e42 100644 --- a/homeassistant/components/nordpool/sensor.py +++ b/homeassistant/components/nordpool/sensor.py @@ -27,7 +27,9 @@ from .entity import NordpoolBaseEntity PARALLEL_UPDATES = 0 -def get_prices(data: DeliveryPeriodData) -> dict[str, tuple[float, float, float]]: +def get_prices( + data: DeliveryPeriodData, +) -> dict[str, tuple[float | None, float, float 
| None]]: """Return previous, current and next prices. Output: {"SE3": (10.0, 10.5, 12.1)} @@ -39,6 +41,7 @@ def get_prices(data: DeliveryPeriodData) -> dict[str, tuple[float, float, float] previous_time = current_time - timedelta(hours=1) next_time = current_time + timedelta(hours=1) price_data = data.entries + LOGGER.debug("Price data: %s", price_data) for entry in price_data: if entry.start <= current_time <= entry.end: current_price_entries = entry.entry @@ -46,10 +49,20 @@ def get_prices(data: DeliveryPeriodData) -> dict[str, tuple[float, float, float] last_price_entries = entry.entry if entry.start <= next_time <= entry.end: next_price_entries = entry.entry + LOGGER.debug( + "Last price %s, current price %s, next price %s", + last_price_entries, + current_price_entries, + next_price_entries, + ) result = {} for area, price in current_price_entries.items(): - result[area] = (last_price_entries[area], price, next_price_entries[area]) + result[area] = ( + last_price_entries.get(area), + price, + next_price_entries.get(area), + ) LOGGER.debug("Prices: %s", result) return result @@ -90,7 +103,7 @@ class NordpoolDefaultSensorEntityDescription(SensorEntityDescription): class NordpoolPricesSensorEntityDescription(SensorEntityDescription): """Describes Nord Pool prices sensor entity.""" - value_fn: Callable[[tuple[float, float, float]], float | None] + value_fn: Callable[[tuple[float | None, float, float | None]], float | None] @dataclass(frozen=True, kw_only=True) @@ -136,13 +149,13 @@ PRICES_SENSOR_TYPES: tuple[NordpoolPricesSensorEntityDescription, ...] 
= ( NordpoolPricesSensorEntityDescription( key="last_price", translation_key="last_price", - value_fn=lambda data: data[0] / 1000, + value_fn=lambda data: data[0] / 1000 if data[0] else None, suggested_display_precision=2, ), NordpoolPricesSensorEntityDescription( key="next_price", translation_key="next_price", - value_fn=lambda data: data[2] / 1000, + value_fn=lambda data: data[2] / 1000 if data[2] else None, suggested_display_precision=2, ), ) From 6fe492a51cfbf45a33da77d52857a406a01871e5 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Fri, 6 Dec 2024 12:22:05 +0100 Subject: [PATCH 013/677] Bump deebot-client to 9.2.0 (#132467) --- homeassistant/components/ecovacs/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 546aba01d90..ad154b8f284 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==9.1.0"] + "requirements": ["py-sucks==0.9.10", "deebot-client==9.2.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index f44e141de94..956ba470706 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -738,7 +738,7 @@ debugpy==1.8.6 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==9.1.0 +deebot-client==9.2.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 96c379a77e2..8038083fff5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -628,7 +628,7 @@ dbus-fast==2.24.3 debugpy==1.8.6 # homeassistant.components.ecovacs -deebot-client==9.1.0 
+deebot-client==9.2.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns From 35873cbe2788fd82576a95d3902e8e9c78412da0 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Fri, 6 Dec 2024 11:01:00 +0100 Subject: [PATCH 014/677] Point to the Ecovacs issue in the library for unspoorted devices (#132470) Co-authored-by: Franck Nijhof --- homeassistant/components/ecovacs/controller.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/ecovacs/controller.py b/homeassistant/components/ecovacs/controller.py index 3a70ab2af5b..69dd0f0813f 100644 --- a/homeassistant/components/ecovacs/controller.py +++ b/homeassistant/components/ecovacs/controller.py @@ -99,8 +99,8 @@ class EcovacsController: for device_config in devices.not_supported: _LOGGER.warning( ( - 'Device "%s" not supported. Please add support for it to ' - "https://github.com/DeebotUniverse/client.py: %s" + 'Device "%s" not supported. More information at ' + "https://github.com/DeebotUniverse/client.py/issues/612: %s" ), device_config["deviceName"], device_config, From 32aee614412c06080ca3bc99d9c4c3e56fff2a8f Mon Sep 17 00:00:00 2001 From: "Steven B." 
<51370195+sdb9696@users.noreply.github.com> Date: Fri, 6 Dec 2024 14:27:52 +0000 Subject: [PATCH 015/677] Bump tplink python-kasa dependency to 0.8.1 (#132472) --- homeassistant/components/tplink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tplink/manifest.json b/homeassistant/components/tplink/manifest.json index 3f19f50cdb6..6ce46c0d488 100644 --- a/homeassistant/components/tplink/manifest.json +++ b/homeassistant/components/tplink/manifest.json @@ -300,5 +300,5 @@ "documentation": "https://www.home-assistant.io/integrations/tplink", "iot_class": "local_polling", "loggers": ["kasa"], - "requirements": ["python-kasa[speedups]==0.8.0"] + "requirements": ["python-kasa[speedups]==0.8.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 956ba470706..83cc4b0f7f8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2362,7 +2362,7 @@ python-join-api==0.0.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.8.0 +python-kasa[speedups]==0.8.1 # homeassistant.components.linkplay python-linkplay==0.0.20 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8038083fff5..cf08e413f32 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1889,7 +1889,7 @@ python-izone==1.2.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.8.0 +python-kasa[speedups]==0.8.1 # homeassistant.components.linkplay python-linkplay==0.0.20 From df9eb482b56398f1b5d17883bd7720670529c53f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 6 Dec 2024 12:23:07 +0100 Subject: [PATCH 016/677] Bump samsungtvws to 2.7.2 (#132474) --- homeassistant/components/samsungtv/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git 
a/homeassistant/components/samsungtv/manifest.json b/homeassistant/components/samsungtv/manifest.json index 041e9b8fe9b..1a6b5ed5313 100644 --- a/homeassistant/components/samsungtv/manifest.json +++ b/homeassistant/components/samsungtv/manifest.json @@ -37,7 +37,7 @@ "requirements": [ "getmac==0.9.4", "samsungctl[websocket]==0.7.1", - "samsungtvws[async,encrypted]==2.7.1", + "samsungtvws[async,encrypted]==2.7.2", "wakeonlan==2.1.0", "async-upnp-client==0.41.0" ], diff --git a/requirements_all.txt b/requirements_all.txt index 83cc4b0f7f8..78a6cf7a3b0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2610,7 +2610,7 @@ rxv==0.7.0 samsungctl[websocket]==0.7.1 # homeassistant.components.samsungtv -samsungtvws[async,encrypted]==2.7.1 +samsungtvws[async,encrypted]==2.7.2 # homeassistant.components.sanix sanix==1.0.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index cf08e413f32..2157c2d7d6d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2086,7 +2086,7 @@ rxv==0.7.0 samsungctl[websocket]==0.7.1 # homeassistant.components.samsungtv -samsungtvws[async,encrypted]==2.7.1 +samsungtvws[async,encrypted]==2.7.2 # homeassistant.components.sanix sanix==1.0.6 From 3b30bbb85e08b845f9254df385137303378cb8b2 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Fri, 6 Dec 2024 12:22:42 +0100 Subject: [PATCH 017/677] Update frontend to 20241127.5 (#132475) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 97a67cbc082..b8033f3f1fd 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": 
"system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241127.4"] + "requirements": ["home-assistant-frontend==20241127.5"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index c416207d803..0ac34f13485 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -34,7 +34,7 @@ habluetooth==3.6.0 hass-nabucasa==0.86.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241127.4 +home-assistant-frontend==20241127.5 home-assistant-intents==2024.12.4 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 78a6cf7a3b0..6f460e6b6ed 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1130,7 +1130,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.4 +home-assistant-frontend==20241127.5 # homeassistant.components.conversation home-assistant-intents==2024.12.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2157c2d7d6d..8dec8a5ff70 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -956,7 +956,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.4 +home-assistant-frontend==20241127.5 # homeassistant.components.conversation home-assistant-intents==2024.12.4 From 8827454dbd25fc5c7ced4076a5b2cbab2e3fb253 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Fri, 6 Dec 2024 16:58:09 +0100 Subject: [PATCH 018/677] Update frontend to 20241127.6 (#132494) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index b8033f3f1fd..e68b9312081 100644 --- 
a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241127.5"] + "requirements": ["home-assistant-frontend==20241127.6"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 0ac34f13485..9e6d2d58927 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -34,7 +34,7 @@ habluetooth==3.6.0 hass-nabucasa==0.86.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241127.5 +home-assistant-frontend==20241127.6 home-assistant-intents==2024.12.4 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 6f460e6b6ed..bfc9d4da538 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1130,7 +1130,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.5 +home-assistant-frontend==20241127.6 # homeassistant.components.conversation home-assistant-intents==2024.12.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8dec8a5ff70..eeb99062299 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -956,7 +956,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.5 +home-assistant-frontend==20241127.6 # homeassistant.components.conversation home-assistant-intents==2024.12.4 From 30504fc9bdc3b258a83d04ab657b346a723ce02a Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Fri, 6 Dec 2024 07:58:48 -0800 Subject: [PATCH 019/677] Fix google tasks due date timezone handling (#132498) --- homeassistant/components/google_tasks/todo.py | 10 +++-- .../google_tasks/snapshots/test_todo.ambr | 31 ++++++++++++++- tests/components/google_tasks/test_todo.py | 38 
++++++++++++++++++- 3 files changed, 73 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/google_tasks/todo.py b/homeassistant/components/google_tasks/todo.py index 5196f89728d..86cb5e09300 100644 --- a/homeassistant/components/google_tasks/todo.py +++ b/homeassistant/components/google_tasks/todo.py @@ -2,7 +2,7 @@ from __future__ import annotations -from datetime import date, datetime, timedelta +from datetime import UTC, date, datetime, timedelta from typing import Any, cast from homeassistant.components.todo import ( @@ -39,8 +39,10 @@ def _convert_todo_item(item: TodoItem) -> dict[str, str | None]: else: result["status"] = TodoItemStatus.NEEDS_ACTION if (due := item.due) is not None: - # due API field is a timestamp string, but with only date resolution - result["due"] = dt_util.start_of_local_day(due).isoformat() + # due API field is a timestamp string, but with only date resolution. + # The time portion of the date is always discarded by the API, so we + # always set to UTC. + result["due"] = dt_util.start_of_local_day(due).replace(tzinfo=UTC).isoformat() else: result["due"] = None result["notes"] = item.description @@ -51,6 +53,8 @@ def _convert_api_item(item: dict[str, str]) -> TodoItem: """Convert tasks API items into a TodoItem.""" due: date | None = None if (due_str := item.get("due")) is not None: + # Due dates are returned always in UTC so we only need to + # parse the date portion which will be interpreted as a a local date. 
due = datetime.fromisoformat(due_str).date() return TodoItem( summary=item["title"], diff --git a/tests/components/google_tasks/snapshots/test_todo.ambr b/tests/components/google_tasks/snapshots/test_todo.ambr index 76611ba4a31..f32441354fc 100644 --- a/tests/components/google_tasks/snapshots/test_todo.ambr +++ b/tests/components/google_tasks/snapshots/test_todo.ambr @@ -15,7 +15,7 @@ ) # --- # name: test_create_todo_list_item[due].1 - '{"title": "Soda", "status": "needsAction", "due": "2023-11-18T00:00:00-08:00", "notes": null}' + '{"title": "Soda", "status": "needsAction", "due": "2023-11-18T00:00:00+00:00", "notes": null}' # --- # name: test_create_todo_list_item[summary] tuple( @@ -137,7 +137,7 @@ ) # --- # name: test_partial_update[due_date].1 - '{"title": "Water", "status": "needsAction", "due": "2023-11-18T00:00:00-08:00", "notes": null}' + '{"title": "Water", "status": "needsAction", "due": "2023-11-18T00:00:00+00:00", "notes": null}' # --- # name: test_partial_update[empty_description] tuple( @@ -166,6 +166,33 @@ # name: test_partial_update_status[api_responses0].1 '{"title": "Water", "status": "needsAction", "due": null, "notes": null}' # --- +# name: test_update_due_date[api_responses0-America/Regina] + tuple( + 'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks/some-task-id?alt=json', + 'PATCH', + ) +# --- +# name: test_update_due_date[api_responses0-America/Regina].1 + '{"title": "Water", "status": "needsAction", "due": "2024-12-05T00:00:00+00:00", "notes": null}' +# --- +# name: test_update_due_date[api_responses0-Asia/Tokyo] + tuple( + 'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks/some-task-id?alt=json', + 'PATCH', + ) +# --- +# name: test_update_due_date[api_responses0-Asia/Tokyo].1 + '{"title": "Water", "status": "needsAction", "due": "2024-12-05T00:00:00+00:00", "notes": null}' +# --- +# name: test_update_due_date[api_responses0-UTC] + tuple( + 
'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks/some-task-id?alt=json', + 'PATCH', + ) +# --- +# name: test_update_due_date[api_responses0-UTC].1 + '{"title": "Water", "status": "needsAction", "due": "2024-12-05T00:00:00+00:00", "notes": null}' +# --- # name: test_update_todo_list_item[api_responses0] tuple( 'https://tasks.googleapis.com/tasks/v1/lists/task-list-id-1/tasks/some-task-id?alt=json', diff --git a/tests/components/google_tasks/test_todo.py b/tests/components/google_tasks/test_todo.py index b0ee135d4a9..c5ecc0ca2cf 100644 --- a/tests/components/google_tasks/test_todo.py +++ b/tests/components/google_tasks/test_todo.py @@ -239,6 +239,7 @@ def mock_http_response(response_handler: list | Callable) -> Mock: yield mock_response +@pytest.mark.parametrize("timezone", ["America/Regina", "UTC", "Asia/Tokyo"]) @pytest.mark.parametrize( "api_responses", [ @@ -251,7 +252,7 @@ def mock_http_response(response_handler: list | Callable) -> Mock: "title": "Task 1", "status": "needsAction", "position": "0000000000000001", - "due": "2023-11-18T00:00:00+00:00", + "due": "2023-11-18T00:00:00Z", }, { "id": "task-2", @@ -271,8 +272,10 @@ async def test_get_items( integration_setup: Callable[[], Awaitable[bool]], hass_ws_client: WebSocketGenerator, ws_get_items: Callable[[], Awaitable[dict[str, str]]], + timezone: str, ) -> None: """Test getting todo list items.""" + await hass.config.async_set_time_zone(timezone) assert await integration_setup() @@ -484,6 +487,39 @@ async def test_update_todo_list_item( assert call.kwargs.get("body") == snapshot +@pytest.mark.parametrize("timezone", ["America/Regina", "UTC", "Asia/Tokyo"]) +@pytest.mark.parametrize("api_responses", [UPDATE_API_RESPONSES]) +async def test_update_due_date( + hass: HomeAssistant, + setup_credentials: None, + integration_setup: Callable[[], Awaitable[bool]], + mock_http_response: Any, + snapshot: SnapshotAssertion, + timezone: str, +) -> None: + """Test for updating the due date of a To-do item 
and timezone.""" + await hass.config.async_set_time_zone(timezone) + + assert await integration_setup() + + state = hass.states.get("todo.my_tasks") + assert state + assert state.state == "1" + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "some-task-id", ATTR_DUE_DATE: "2024-12-5"}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, + blocking=True, + ) + assert len(mock_http_response.call_args_list) == 4 + call = mock_http_response.call_args_list[2] + assert call + assert call.args == snapshot + assert call.kwargs.get("body") == snapshot + + @pytest.mark.parametrize( "api_responses", [ From 4884891b2c17bb7aa8a2cc7a2e050172e8c3c148 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 6 Dec 2024 18:54:13 +0100 Subject: [PATCH 020/677] Bump version to 2024.12.1 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index c41ab6ec382..ce9fcf45b76 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -25,7 +25,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 12 -PATCH_VERSION: Final = "0" +PATCH_VERSION: Final = "1" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 07aca0f1741..f4ae0f39ded 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.12.0" +version = "2024.12.1" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From f343dce418a714af66772891e5bd9a3255fd4fa0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Mon, 9 Dec 2024 07:51:03 +0100 Subject: [PATCH 021/677] Enable additional entities on myUplink model SMO20 (#131688) * Add a couple of entities to SMO 20 * Enable additional entities on SMO20 --- homeassistant/components/myuplink/helpers.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/homeassistant/components/myuplink/helpers.py b/homeassistant/components/myuplink/helpers.py index de5486d8dea..bd875d8a872 100644 --- a/homeassistant/components/myuplink/helpers.py +++ b/homeassistant/components/myuplink/helpers.py @@ -95,11 +95,17 @@ PARAMETER_ID_TO_EXCLUDE_F730 = ( ) PARAMETER_ID_TO_INCLUDE_SMO20 = ( + "40013", + "40033", "40940", + "44069", + "44071", + "44073", "47011", "47015", "47028", "47032", + "47398", "50004", ) From 4e56f9c0144d852d9c411202bbe99232acf1b392 Mon Sep 17 00:00:00 2001 From: David Knowles Date: Sun, 1 Dec 2024 15:44:14 -0500 Subject: [PATCH 022/677] Bump pydrawise to 2024.12.0 (#132015) --- homeassistant/components/hydrawise/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/hydrawise/manifest.json b/homeassistant/components/hydrawise/manifest.json index 9678dc83e5f..50f803c07dc 100644 --- a/homeassistant/components/hydrawise/manifest.json +++ b/homeassistant/components/hydrawise/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/hydrawise", "iot_class": "cloud_polling", "loggers": ["pydrawise"], - "requirements": ["pydrawise==2024.9.0"] + "requirements": ["pydrawise==2024.12.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index bfc9d4da538..c429d85dc8a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1859,7 +1859,7 @@ pydiscovergy==3.0.2 pydoods==1.0.2 # homeassistant.components.hydrawise -pydrawise==2024.9.0 
+pydrawise==2024.12.0 # homeassistant.components.android_ip_webcam pydroid-ipcam==2.0.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index eeb99062299..d649d49fdb5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1500,7 +1500,7 @@ pydexcom==0.2.3 pydiscovergy==3.0.2 # homeassistant.components.hydrawise -pydrawise==2024.9.0 +pydrawise==2024.12.0 # homeassistant.components.android_ip_webcam pydroid-ipcam==2.0.0 From 1f6c5b4d8bdb3cb1e8edda25da9395dc859938aa Mon Sep 17 00:00:00 2001 From: Ravaka Razafimanantsoa <3774520+SeraphicRav@users.noreply.github.com> Date: Mon, 9 Dec 2024 07:35:41 +0900 Subject: [PATCH 023/677] Fix API change for AC not supporting floats in SwitchBot Cloud (#132231) --- homeassistant/components/switchbot_cloud/climate.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/switchbot_cloud/climate.py b/homeassistant/components/switchbot_cloud/climate.py index cd60313f37a..7b1c3415a48 100644 --- a/homeassistant/components/switchbot_cloud/climate.py +++ b/homeassistant/components/switchbot_cloud/climate.py @@ -79,6 +79,8 @@ class SwitchBotCloudAirConditioner(SwitchBotCloudEntity, ClimateEntity): _attr_hvac_mode = HVACMode.FAN_ONLY _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_target_temperature = 21 + _attr_target_temperature_step = 1 + _attr_precision = 1 _attr_name = None _enable_turn_on_off_backwards_compatibility = False @@ -97,7 +99,7 @@ class SwitchBotCloudAirConditioner(SwitchBotCloudEntity, ClimateEntity): ) await self.send_api_command( AirConditionerCommands.SET_ALL, - parameters=f"{new_temperature},{new_mode},{new_fan_speed},on", + parameters=f"{int(new_temperature)},{new_mode},{new_fan_speed},on", ) async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: From d6a4a7f052f2843300f0641641dfe9a821281362 Mon Sep 17 00:00:00 2001 From: Alex Date: Fri, 6 Dec 2024 22:43:57 +0100 Subject: [PATCH 024/677] Update pyrisco to 0.6.5 (#132493) 
--- homeassistant/components/risco/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/risco/manifest.json b/homeassistant/components/risco/manifest.json index c226c1c590d..149b8761589 100644 --- a/homeassistant/components/risco/manifest.json +++ b/homeassistant/components/risco/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/risco", "iot_class": "local_push", "loggers": ["pyrisco"], - "requirements": ["pyrisco==0.6.4"] + "requirements": ["pyrisco==0.6.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index c429d85dc8a..7b52b630fd8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2203,7 +2203,7 @@ pyrecswitch==1.0.2 pyrepetierng==0.1.0 # homeassistant.components.risco -pyrisco==0.6.4 +pyrisco==0.6.5 # homeassistant.components.rituals_perfume_genie pyrituals==0.0.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d649d49fdb5..f545ef00188 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1775,7 +1775,7 @@ pyqwikswitch==0.93 pyrainbird==6.0.1 # homeassistant.components.risco -pyrisco==0.6.4 +pyrisco==0.6.5 # homeassistant.components.rituals_perfume_genie pyrituals==0.0.6 From 5d01f7db859670642c6b430c59681206ce551ce3 Mon Sep 17 00:00:00 2001 From: Erwin Douna Date: Fri, 6 Dec 2024 21:13:26 +0100 Subject: [PATCH 025/677] Fix PyTado dependency (#132510) --- homeassistant/components/tado/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tado/manifest.json b/homeassistant/components/tado/manifest.json index 652d51f0261..b0c00c888b7 100644 --- a/homeassistant/components/tado/manifest.json +++ b/homeassistant/components/tado/manifest.json @@ -14,5 +14,5 @@ }, "iot_class": "cloud_polling", "loggers": ["PyTado"], - 
"requirements": ["python-tado==0.17.7"] + "requirements": ["python-tado==0.17.6"] } diff --git a/requirements_all.txt b/requirements_all.txt index 7b52b630fd8..1a18dd523b2 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2411,7 +2411,7 @@ python-smarttub==0.0.38 python-songpal==0.16.2 # homeassistant.components.tado -python-tado==0.17.7 +python-tado==0.17.6 # homeassistant.components.technove python-technove==1.3.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f545ef00188..7b831cd8ead 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1932,7 +1932,7 @@ python-smarttub==0.0.38 python-songpal==0.16.2 # homeassistant.components.tado -python-tado==0.17.7 +python-tado==0.17.6 # homeassistant.components.technove python-technove==1.3.1 From b0005cedff2b2478cfcb98d43f41fe5900cdeb22 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 6 Dec 2024 15:05:27 -0600 Subject: [PATCH 026/677] Bump pycups to 2.0.4 (#132514) --- homeassistant/components/cups/manifest.json | 2 +- requirements_all.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/cups/manifest.json b/homeassistant/components/cups/manifest.json index c4aa596f01e..c8f19236ce7 100644 --- a/homeassistant/components/cups/manifest.json +++ b/homeassistant/components/cups/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/cups", "iot_class": "local_polling", "quality_scale": "legacy", - "requirements": ["pycups==1.9.73"] + "requirements": ["pycups==2.0.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index 1a18dd523b2..f68c644f31d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1832,7 +1832,7 @@ pycountry==24.6.1 pycsspeechtts==1.0.8 # homeassistant.components.cups -# pycups==1.9.73 +# pycups==2.0.4 # homeassistant.components.daikin pydaikin==2.13.7 From f1284178ed21cd8fae4079f79620cb79a7413cc5 Mon Sep 17 00:00:00 2001 From: Franck 
Nijhof Date: Fri, 6 Dec 2024 23:26:24 +0100 Subject: [PATCH 027/677] Update debugpy to 1.8.8 (#132519) --- homeassistant/components/debugpy/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/debugpy/manifest.json b/homeassistant/components/debugpy/manifest.json index 1e31e002a81..c6e7f79be49 100644 --- a/homeassistant/components/debugpy/manifest.json +++ b/homeassistant/components/debugpy/manifest.json @@ -6,5 +6,5 @@ "integration_type": "service", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["debugpy==1.8.6"] + "requirements": ["debugpy==1.8.8"] } diff --git a/requirements_all.txt b/requirements_all.txt index f68c644f31d..965a44ce16c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -729,7 +729,7 @@ datapoint==0.9.9 dbus-fast==2.24.3 # homeassistant.components.debugpy -debugpy==1.8.6 +debugpy==1.8.8 # homeassistant.components.decora_wifi # decora-wifi==1.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7b831cd8ead..481f97a39ab 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -625,7 +625,7 @@ datapoint==0.9.9 dbus-fast==2.24.3 # homeassistant.components.debugpy -debugpy==1.8.6 +debugpy==1.8.8 # homeassistant.components.ecovacs deebot-client==9.2.0 From af5f718a71eb94a3157ae436b7903ba1345f8ff5 Mon Sep 17 00:00:00 2001 From: Austin Mroczek Date: Sat, 7 Dec 2024 01:43:55 -0800 Subject: [PATCH 028/677] bump total_connect_client to 2023.12 (#132531) --- homeassistant/components/totalconnect/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/totalconnect/manifest.json b/homeassistant/components/totalconnect/manifest.json index 87ec14621d9..33306a7adba 100644 --- a/homeassistant/components/totalconnect/manifest.json +++ 
b/homeassistant/components/totalconnect/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/totalconnect", "iot_class": "cloud_polling", "loggers": ["total_connect_client"], - "requirements": ["total-connect-client==2024.5"] + "requirements": ["total-connect-client==2024.12"] } diff --git a/requirements_all.txt b/requirements_all.txt index 965a44ce16c..58c3fae428c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2858,7 +2858,7 @@ tololib==1.1.0 toonapi==0.3.0 # homeassistant.components.totalconnect -total-connect-client==2024.5 +total-connect-client==2024.12 # homeassistant.components.tplink_lte tp-connected==0.0.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 481f97a39ab..0d723aafc3c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2274,7 +2274,7 @@ tololib==1.1.0 toonapi==0.3.0 # homeassistant.components.totalconnect -total-connect-client==2024.5 +total-connect-client==2024.12 # homeassistant.components.tplink_omada tplink-omada-client==1.4.3 From db141ce44977a12edc299894514dc9928db1a105 Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Sat, 7 Dec 2024 22:31:11 +0100 Subject: [PATCH 029/677] Bump aiounifi to v81 to fix partitioned cookies on python 3.13 (#132540) --- homeassistant/components/unifi/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/unifi/manifest.json b/homeassistant/components/unifi/manifest.json index 66d0a53284b..ce573592153 100644 --- a/homeassistant/components/unifi/manifest.json +++ b/homeassistant/components/unifi/manifest.json @@ -7,7 +7,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["aiounifi"], - "requirements": ["aiounifi==80"], + "requirements": ["aiounifi==81"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/requirements_all.txt b/requirements_all.txt index 
58c3fae428c..d46e4a231d9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -402,7 +402,7 @@ aiotedee==0.2.20 aiotractive==0.6.0 # homeassistant.components.unifi -aiounifi==80 +aiounifi==81 # homeassistant.components.vlc_telnet aiovlc==0.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0d723aafc3c..351313b7088 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -384,7 +384,7 @@ aiotedee==0.2.20 aiotractive==0.6.0 # homeassistant.components.unifi -aiounifi==80 +aiounifi==81 # homeassistant.components.vlc_telnet aiovlc==0.5.1 From 0096ffb659a17a13c9102b9af722840b4a246a37 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 8 Dec 2024 23:30:12 +0100 Subject: [PATCH 030/677] Update twentemilieu to 2.2.0 (#132554) --- homeassistant/components/twentemilieu/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/twentemilieu/manifest.json b/homeassistant/components/twentemilieu/manifest.json index a89091948c2..292887c6c5b 100644 --- a/homeassistant/components/twentemilieu/manifest.json +++ b/homeassistant/components/twentemilieu/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["twentemilieu"], - "requirements": ["twentemilieu==2.1.0"] + "requirements": ["twentemilieu==2.2.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index d46e4a231d9..097433d07c4 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2882,7 +2882,7 @@ ttn_client==1.2.0 tuya-device-sharing-sdk==0.2.1 # homeassistant.components.twentemilieu -twentemilieu==2.1.0 +twentemilieu==2.2.0 # homeassistant.components.twilio twilio==6.32.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 351313b7088..84a6820e71d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2295,7 +2295,7 @@ ttn_client==1.2.0 
tuya-device-sharing-sdk==0.2.1 # homeassistant.components.twentemilieu -twentemilieu==2.1.0 +twentemilieu==2.2.0 # homeassistant.components.twilio twilio==6.32.0 From a33c69a2a234d59c27b32474fe8ed59990deee01 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 7 Dec 2024 11:12:58 -0600 Subject: [PATCH 031/677] Bump yalexs-ble to 2.5.2 (#132560) --- homeassistant/components/august/manifest.json | 2 +- homeassistant/components/yale/manifest.json | 2 +- homeassistant/components/yalexs_ble/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index 96ed982e4ec..99dbbc0ed9c 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.1"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.2"] } diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json index 50c2a0af457..474ed36e90c 100644 --- a/homeassistant/components/yale/manifest.json +++ b/homeassistant/components/yale/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale", "iot_class": "cloud_push", "loggers": ["socketio", "engineio", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.1"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.2"] } diff --git a/homeassistant/components/yalexs_ble/manifest.json b/homeassistant/components/yalexs_ble/manifest.json index c3d1a3d97f1..95d28cd5372 100644 --- a/homeassistant/components/yalexs_ble/manifest.json +++ b/homeassistant/components/yalexs_ble/manifest.json @@ -12,5 +12,5 @@ "dependencies": ["bluetooth_adapters"], 
"documentation": "https://www.home-assistant.io/integrations/yalexs_ble", "iot_class": "local_push", - "requirements": ["yalexs-ble==2.5.1"] + "requirements": ["yalexs-ble==2.5.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 097433d07c4..4a4be451b30 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3044,7 +3044,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.1 +yalexs-ble==2.5.2 # homeassistant.components.august # homeassistant.components.yale diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 84a6820e71d..338cb64868f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2433,7 +2433,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.1 +yalexs-ble==2.5.2 # homeassistant.components.august # homeassistant.components.yale From 26012ac922fa7416a6f504cd98539355b08333b8 Mon Sep 17 00:00:00 2001 From: Bouwe Westerdijk <11290930+bouwew@users.noreply.github.com> Date: Sun, 1 Dec 2024 04:01:33 +0100 Subject: [PATCH 032/677] Bump plugwise to v1.6.1 (#131950) --- homeassistant/components/plugwise/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/plugwise/manifest.json b/homeassistant/components/plugwise/manifest.json index d4d80749a8d..df35777ac54 100644 --- a/homeassistant/components/plugwise/manifest.json +++ b/homeassistant/components/plugwise/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["plugwise"], - "requirements": ["plugwise==1.6.0"], + "requirements": ["plugwise==1.6.1"], "zeroconf": ["_plugwise._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 4a4be451b30..711ea7322d7 100644 --- 
a/requirements_all.txt +++ b/requirements_all.txt @@ -1622,7 +1622,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.6.0 +plugwise==1.6.1 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 338cb64868f..93382902508 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1329,7 +1329,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.6.0 +plugwise==1.6.1 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 From ef89563badd897d400162bc070c6a438a2e6aaa9 Mon Sep 17 00:00:00 2001 From: Bouwe Westerdijk <11290930+bouwew@users.noreply.github.com> Date: Sun, 8 Dec 2024 23:36:55 +0100 Subject: [PATCH 033/677] Bump plugwise to v1.6.2 and adapt (#132608) --- homeassistant/components/plugwise/climate.py | 13 ++----------- homeassistant/components/plugwise/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../plugwise/fixtures/m_adam_heating/all_data.json | 2 +- .../plugwise/fixtures/m_adam_jip/all_data.json | 8 ++++---- .../m_adam_multiple_devices_per_zone/all_data.json | 7 ++++++- .../plugwise/snapshots/test_diagnostics.ambr | 7 ++++++- tests/components/plugwise/test_climate.py | 12 ++++-------- 9 files changed, 26 insertions(+), 29 deletions(-) diff --git a/homeassistant/components/plugwise/climate.py b/homeassistant/components/plugwise/climate.py index 242b0944782..0cc0a76bd77 100644 --- a/homeassistant/components/plugwise/climate.py +++ b/homeassistant/components/plugwise/climate.py @@ -191,17 +191,8 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): self._previous_action_mode(self.coordinator) # Adam provides the hvac_action for each thermostat - if self._gateway["smile_name"] == "Adam": - if (control_state := self.device.get("control_state")) == "cooling": - return HVACAction.COOLING - if control_state == "heating": - return 
HVACAction.HEATING - if control_state == "preheating": - return HVACAction.PREHEATING - if control_state == "off": - return HVACAction.IDLE - - return HVACAction.IDLE + if (action := self.device.get("control_state")) is not None: + return HVACAction(action) # Anna heater: str = self._gateway["heater_id"] diff --git a/homeassistant/components/plugwise/manifest.json b/homeassistant/components/plugwise/manifest.json index df35777ac54..d7fcec3bbae 100644 --- a/homeassistant/components/plugwise/manifest.json +++ b/homeassistant/components/plugwise/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["plugwise"], - "requirements": ["plugwise==1.6.1"], + "requirements": ["plugwise==1.6.2"], "zeroconf": ["_plugwise._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 711ea7322d7..6d3ae285f6c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1622,7 +1622,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.6.1 +plugwise==1.6.2 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 93382902508..15ea88827b0 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1329,7 +1329,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.6.1 +plugwise==1.6.2 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 diff --git a/tests/components/plugwise/fixtures/m_adam_heating/all_data.json b/tests/components/plugwise/fixtures/m_adam_heating/all_data.json index fab2cea5fdc..bb24faeebfa 100644 --- a/tests/components/plugwise/fixtures/m_adam_heating/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_heating/all_data.json @@ -176,7 +176,7 @@ "off" ], "climate_mode": "auto", - "control_state": "off", + "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Bathroom", diff --git 
a/tests/components/plugwise/fixtures/m_adam_jip/all_data.json b/tests/components/plugwise/fixtures/m_adam_jip/all_data.json index 4516ce2c2d0..1ca9e77010f 100644 --- a/tests/components/plugwise/fixtures/m_adam_jip/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_jip/all_data.json @@ -3,7 +3,7 @@ "06aecb3d00354375924f50c47af36bd2": { "active_preset": "no_frost", "climate_mode": "off", - "control_state": "off", + "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Slaapkamer", @@ -26,7 +26,7 @@ "13228dab8ce04617af318a2888b3c548": { "active_preset": "home", "climate_mode": "heat", - "control_state": "off", + "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Woonkamer", @@ -238,7 +238,7 @@ "d27aede973b54be484f6842d1b2802ad": { "active_preset": "home", "climate_mode": "heat", - "control_state": "off", + "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Kinderkamer", @@ -285,7 +285,7 @@ "d58fec52899f4f1c92e4f8fad6d8c48c": { "active_preset": "home", "climate_mode": "heat", - "control_state": "off", + "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Logeerkamer", diff --git a/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json b/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json index 67e8c235cc3..8da184a7a3e 100644 --- a/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json @@ -32,6 +32,7 @@ "off" ], "climate_mode": "auto", + "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Badkamer", @@ -66,6 +67,7 @@ "off" ], "climate_mode": "heat", + "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Bios", @@ -112,6 +114,7 @@ "446ac08dd04d4eff8ac57489757b7314": { "active_preset": "no_frost", "climate_mode": "heat", + 
"control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Garage", @@ -258,6 +261,7 @@ "off" ], "climate_mode": "auto", + "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Jessie", @@ -402,6 +406,7 @@ "off" ], "climate_mode": "auto", + "control_state": "heating", "dev_class": "climate", "model": "ThermoZone", "name": "Woonkamer", @@ -577,7 +582,7 @@ "cooling_present": false, "gateway_id": "fe799307f1624099878210aa0b9f1475", "heater_id": "90986d591dcd426cae3ec3e8111ff730", - "item_count": 364, + "item_count": 369, "notifications": { "af82e4ccf9c548528166d38e560662a4": { "warning": "Node Plug (with MAC address 000D6F000D13CB01, in room 'n.a.') has been unreachable since 23:03 2020-01-18. Please check the connection and restart the device." diff --git a/tests/components/plugwise/snapshots/test_diagnostics.ambr b/tests/components/plugwise/snapshots/test_diagnostics.ambr index bf7d4260a32..806c92fe7cb 100644 --- a/tests/components/plugwise/snapshots/test_diagnostics.ambr +++ b/tests/components/plugwise/snapshots/test_diagnostics.ambr @@ -34,6 +34,7 @@ 'off', ]), 'climate_mode': 'auto', + 'control_state': 'idle', 'dev_class': 'climate', 'model': 'ThermoZone', 'name': 'Badkamer', @@ -75,6 +76,7 @@ 'off', ]), 'climate_mode': 'heat', + 'control_state': 'idle', 'dev_class': 'climate', 'model': 'ThermoZone', 'name': 'Bios', @@ -131,6 +133,7 @@ '446ac08dd04d4eff8ac57489757b7314': dict({ 'active_preset': 'no_frost', 'climate_mode': 'heat', + 'control_state': 'idle', 'dev_class': 'climate', 'model': 'ThermoZone', 'name': 'Garage', @@ -286,6 +289,7 @@ 'off', ]), 'climate_mode': 'auto', + 'control_state': 'idle', 'dev_class': 'climate', 'model': 'ThermoZone', 'name': 'Jessie', @@ -440,6 +444,7 @@ 'off', ]), 'climate_mode': 'auto', + 'control_state': 'heating', 'dev_class': 'climate', 'model': 'ThermoZone', 'name': 'Woonkamer', @@ -625,7 +630,7 @@ 'cooling_present': False, 'gateway_id': 'fe799307f1624099878210aa0b9f1475', 
'heater_id': '90986d591dcd426cae3ec3e8111ff730', - 'item_count': 364, + 'item_count': 369, 'notifications': dict({ 'af82e4ccf9c548528166d38e560662a4': dict({ 'warning': "Node Plug (with MAC address 000D6F000D13CB01, in room 'n.a.') has been unreachable since 23:03 2020-01-18. Please check the connection and restart the device.", diff --git a/tests/components/plugwise/test_climate.py b/tests/components/plugwise/test_climate.py index c0c1c00c68d..17c4300e685 100644 --- a/tests/components/plugwise/test_climate.py +++ b/tests/components/plugwise/test_climate.py @@ -31,15 +31,13 @@ async def test_adam_climate_entity_attributes( state = hass.states.get("climate.woonkamer") assert state assert state.state == HVACMode.AUTO + assert state.attributes["hvac_action"] == "heating" assert state.attributes["hvac_modes"] == [HVACMode.AUTO, HVACMode.HEAT] - # hvac_action is not asserted as the fixture is not in line with recent firmware functionality - assert "preset_modes" in state.attributes assert "no_frost" in state.attributes["preset_modes"] assert "home" in state.attributes["preset_modes"] - - assert state.attributes["current_temperature"] == 20.9 assert state.attributes["preset_mode"] == "home" + assert state.attributes["current_temperature"] == 20.9 assert state.attributes["supported_features"] == 17 assert state.attributes["temperature"] == 21.5 assert state.attributes["min_temp"] == 0.0 @@ -49,15 +47,13 @@ async def test_adam_climate_entity_attributes( state = hass.states.get("climate.jessie") assert state assert state.state == HVACMode.AUTO + assert state.attributes["hvac_action"] == "idle" assert state.attributes["hvac_modes"] == [HVACMode.AUTO, HVACMode.HEAT] - # hvac_action is not asserted as the fixture is not in line with recent firmware functionality - assert "preset_modes" in state.attributes assert "no_frost" in state.attributes["preset_modes"] assert "home" in state.attributes["preset_modes"] - - assert state.attributes["current_temperature"] == 17.2 assert 
state.attributes["preset_mode"] == "asleep" + assert state.attributes["current_temperature"] == 17.2 assert state.attributes["temperature"] == 15.0 assert state.attributes["min_temp"] == 0.0 assert state.attributes["max_temp"] == 35.0 From 382d32c7a73d38f269eafbe1b74411853e8953dc Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Sun, 8 Dec 2024 15:59:27 +0100 Subject: [PATCH 034/677] Fix config flow in Husqvarna Automower (#132615) --- homeassistant/components/husqvarna_automower/config_flow.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/husqvarna_automower/config_flow.py b/homeassistant/components/husqvarna_automower/config_flow.py index 4da3bd14089..7efed529453 100644 --- a/homeassistant/components/husqvarna_automower/config_flow.py +++ b/homeassistant/components/husqvarna_automower/config_flow.py @@ -53,10 +53,10 @@ class HusqvarnaConfigFlowHandler( tz = await dt_util.async_get_time_zone(str(dt_util.DEFAULT_TIME_ZONE)) automower_api = AutomowerSession(AsyncConfigFlowAuth(websession, token), tz) try: - data = await automower_api.get_status() + status_data = await automower_api.get_status() except Exception: # noqa: BLE001 return self.async_abort(reason="unknown") - if data == {}: + if status_data == {}: return self.async_abort(reason="no_mower_connected") structured_token = structure_token(token[CONF_ACCESS_TOKEN]) From 1993142e449c1a972844b9425675e96ffb898e92 Mon Sep 17 00:00:00 2001 From: puddly <32534428+puddly@users.noreply.github.com> Date: Sun, 8 Dec 2024 15:32:39 -0500 Subject: [PATCH 035/677] Bump ZHA dependencies (#132630) --- homeassistant/components/zha/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index 1fbbd83bb9c..3a301be9b02 100644 --- 
a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.25", "zha==0.0.41"], + "requirements": ["universal-silabs-flasher==0.0.25", "zha==0.0.42"], "usb": [ { "vid": "10C4", diff --git a/requirements_all.txt b/requirements_all.txt index 6d3ae285f6c..6f78faea458 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3081,7 +3081,7 @@ zeroconf==0.136.2 zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.41 +zha==0.0.42 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.13 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 15ea88827b0..15afd06eace 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2464,7 +2464,7 @@ zeroconf==0.136.2 zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.41 +zha==0.0.42 # homeassistant.components.zwave_js zwave-js-server-python==0.59.1 From da344a44e58396079f40d8fbbf140b677da82bff Mon Sep 17 00:00:00 2001 From: Bouwe Westerdijk <11290930+bouwew@users.noreply.github.com> Date: Mon, 9 Dec 2024 12:27:15 +0100 Subject: [PATCH 036/677] Bump plugwise to v1.6.3 (#132673) --- homeassistant/components/plugwise/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/plugwise/manifest.json b/homeassistant/components/plugwise/manifest.json index d7fcec3bbae..60de4496779 100644 --- a/homeassistant/components/plugwise/manifest.json +++ b/homeassistant/components/plugwise/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["plugwise"], - "requirements": ["plugwise==1.6.2"], + "requirements": ["plugwise==1.6.3"], "zeroconf": ["_plugwise._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 6f78faea458..420c11916d9 100644 --- a/requirements_all.txt +++ 
b/requirements_all.txt @@ -1622,7 +1622,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.6.2 +plugwise==1.6.3 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 15afd06eace..6b02a7d8721 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1329,7 +1329,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.6.2 +plugwise==1.6.3 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 From 8fc50c776eceb945666e40c503f05d349e4beb37 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Mon, 9 Dec 2024 17:09:17 +0100 Subject: [PATCH 037/677] Bump yt-dlp to 2024.12.06 (#132684) --- homeassistant/components/media_extractor/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index f85f1561bb9..195dc678bc2 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -8,6 +8,6 @@ "iot_class": "calculated", "loggers": ["yt_dlp"], "quality_scale": "internal", - "requirements": ["yt-dlp[default]==2024.12.03"], + "requirements": ["yt-dlp[default]==2024.12.06"], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index 420c11916d9..c0fe66eb215 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3066,7 +3066,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.12.03 +yt-dlp[default]==2024.12.06 # homeassistant.components.zamg zamg==0.3.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6b02a7d8721..a19c364ddf6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2452,7 +2452,7 
@@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.12.03 +yt-dlp[default]==2024.12.06 # homeassistant.components.zamg zamg==0.3.6 From cac4eef7958efde24072c4a4daff28cfbf63e269 Mon Sep 17 00:00:00 2001 From: Simone Rescio Date: Mon, 9 Dec 2024 17:19:10 +0100 Subject: [PATCH 038/677] Revert "Bump pyezviz to 0.2.2.3" (#132715) --- homeassistant/components/ezviz/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ezviz/manifest.json b/homeassistant/components/ezviz/manifest.json index 7c796c74ef7..53976bf3002 100644 --- a/homeassistant/components/ezviz/manifest.json +++ b/homeassistant/components/ezviz/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/ezviz", "iot_class": "cloud_polling", "loggers": ["paho_mqtt", "pyezviz"], - "requirements": ["pyezviz==0.2.2.3"] + "requirements": ["pyezviz==0.2.1.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index c0fe66eb215..ee3df556c35 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1907,7 +1907,7 @@ pyeverlights==0.1.0 pyevilgenius==2.0.0 # homeassistant.components.ezviz -pyezviz==0.2.2.3 +pyezviz==0.2.1.2 # homeassistant.components.fibaro pyfibaro==0.8.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a19c364ddf6..57d1b378f62 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1536,7 +1536,7 @@ pyeverlights==0.1.0 pyevilgenius==2.0.0 # homeassistant.components.ezviz -pyezviz==0.2.2.3 +pyezviz==0.2.1.2 # homeassistant.components.fibaro pyfibaro==0.8.0 From c8e5a6df5da3f66856adbebf903aac7c19b053d0 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Mon, 9 Dec 2024 10:08:58 -0600 Subject: [PATCH 039/677] Bump intents to 2024.12.9 (#132726) --- homeassistant/components/conversation/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 
+- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- script/hassfest/docker/Dockerfile | 2 +- tests/components/conversation/snapshots/test_http.ambr | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 72e1cebf462..41c9a2d2691 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.4"] + "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.9"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 9e6d2d58927..5d84ccd5815 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -35,7 +35,7 @@ hass-nabucasa==0.86.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 home-assistant-frontend==20241127.6 -home-assistant-intents==2024.12.4 +home-assistant-intents==2024.12.9 httpx==0.27.2 ifaddr==0.2.0 Jinja2==3.1.4 diff --git a/requirements_all.txt b/requirements_all.txt index ee3df556c35..f00f72bfa53 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1133,7 +1133,7 @@ holidays==0.62 home-assistant-frontend==20241127.6 # homeassistant.components.conversation -home-assistant-intents==2024.12.4 +home-assistant-intents==2024.12.9 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 57d1b378f62..f558e120a87 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -959,7 +959,7 @@ holidays==0.62 home-assistant-frontend==20241127.6 # homeassistant.components.conversation -home-assistant-intents==2024.12.4 +home-assistant-intents==2024.12.9 # 
homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index 100be4fdec9..de58d7b07b5 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -23,7 +23,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.4,source=/uv,target=/bin/uv \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.0 \ - PyTurboJPEG==1.7.5 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.4 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 + PyTurboJPEG==1.7.5 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.9 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " diff --git a/tests/components/conversation/snapshots/test_http.ambr b/tests/components/conversation/snapshots/test_http.ambr index a3edd4fa51c..8023d1ee6fa 100644 --- a/tests/components/conversation/snapshots/test_http.ambr +++ b/tests/components/conversation/snapshots/test_http.ambr @@ -571,7 +571,7 @@ 'name': 'HassGetState', }), 'match': True, - 'sentence_template': '[tell me] how many {on_off_domains:domain} (is|are) {on_off_states:state} [in ]', + 'sentence_template': '[tell me] how many {on_off_domains:domain} (is|are) {on_off_states:state} []', 'slots': dict({ 'area': 'kitchen', 'domain': 'lights', From e23987156645ab1b46877d0ea9989b81d02d4959 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Mon, 9 Dec 2024 17:10:52 +0100 Subject: [PATCH 040/677] Update frontend to 20241127.7 (#132729) Co-authored-by: Franck Nijhof --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git 
a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index e68b9312081..bfc08c6e11e 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241127.6"] + "requirements": ["home-assistant-frontend==20241127.7"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 5d84ccd5815..aef46c0ffc6 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -34,7 +34,7 @@ habluetooth==3.6.0 hass-nabucasa==0.86.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241127.6 +home-assistant-frontend==20241127.7 home-assistant-intents==2024.12.9 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index f00f72bfa53..4e8905765e9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1130,7 +1130,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.6 +home-assistant-frontend==20241127.7 # homeassistant.components.conversation home-assistant-intents==2024.12.9 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f558e120a87..932c3941486 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -956,7 +956,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.6 +home-assistant-frontend==20241127.7 # homeassistant.components.conversation home-assistant-intents==2024.12.9 From e4765c40fe9313475941a10d2da543ceb137028b Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Mon, 9 Dec 2024 22:53:17 +0100 Subject: [PATCH 041/677] Bump reolink-aio to 0.11.5 (#132757) --- homeassistant/components/reolink/manifest.json | 2 
+- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index 913864a92fa..a14fea6ac0a 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -18,5 +18,5 @@ "documentation": "https://www.home-assistant.io/integrations/reolink", "iot_class": "local_push", "loggers": ["reolink_aio"], - "requirements": ["reolink-aio==0.11.4"] + "requirements": ["reolink-aio==0.11.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index 4e8905765e9..b7fa39280da 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2556,7 +2556,7 @@ renault-api==0.2.7 renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.11.4 +reolink-aio==0.11.5 # homeassistant.components.idteck_prox rfk101py==0.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 932c3941486..f4c4a06f2e7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2047,7 +2047,7 @@ renault-api==0.2.7 renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.11.4 +reolink-aio==0.11.5 # homeassistant.components.rflink rflink==0.0.66 From 60e8a38ba3e3caa52f649afc57fd6682377bfa89 Mon Sep 17 00:00:00 2001 From: David Knowles Date: Tue, 10 Dec 2024 02:38:34 -0500 Subject: [PATCH 042/677] Catch Hydrawise authorization errors in the correct place (#132727) --- .../components/hydrawise/config_flow.py | 15 ++++--- tests/components/hydrawise/conftest.py | 1 - .../components/hydrawise/test_config_flow.py | 39 +++++++++++++++---- 3 files changed, 41 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/hydrawise/config_flow.py b/homeassistant/components/hydrawise/config_flow.py index 242763e81e3..419927d6d42 100644 --- a/homeassistant/components/hydrawise/config_flow.py +++ 
b/homeassistant/components/hydrawise/config_flow.py @@ -6,7 +6,7 @@ from collections.abc import Callable, Mapping from typing import Any from aiohttp import ClientError -from pydrawise import auth, client +from pydrawise import auth as pydrawise_auth, client from pydrawise.exceptions import NotAuthorizedError import voluptuous as vol @@ -29,16 +29,21 @@ class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): on_failure: Callable[[str], ConfigFlowResult], ) -> ConfigFlowResult: """Create the config entry.""" - # Verify that the provided credentials work.""" - api = client.Hydrawise(auth.Auth(username, password)) + auth = pydrawise_auth.Auth(username, password) try: - # Don't fetch zones because we don't need them yet. - user = await api.get_user(fetch_zones=False) + await auth.token() except NotAuthorizedError: return on_failure("invalid_auth") except TimeoutError: return on_failure("timeout_connect") + + try: + api = client.Hydrawise(auth) + # Don't fetch zones because we don't need them yet. 
+ user = await api.get_user(fetch_zones=False) + except TimeoutError: + return on_failure("timeout_connect") except ClientError as ex: LOGGER.error("Unable to connect to Hydrawise cloud service: %s", ex) return on_failure("cannot_connect") diff --git a/tests/components/hydrawise/conftest.py b/tests/components/hydrawise/conftest.py index a938322414b..2de7fb1da9a 100644 --- a/tests/components/hydrawise/conftest.py +++ b/tests/components/hydrawise/conftest.py @@ -56,7 +56,6 @@ def mock_legacy_pydrawise( @pytest.fixture def mock_pydrawise( - mock_auth: AsyncMock, user: User, controller: Controller, zones: list[Zone], diff --git a/tests/components/hydrawise/test_config_flow.py b/tests/components/hydrawise/test_config_flow.py index e85b1b9b249..4d25fd5840b 100644 --- a/tests/components/hydrawise/test_config_flow.py +++ b/tests/components/hydrawise/test_config_flow.py @@ -21,6 +21,7 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") async def test_form( hass: HomeAssistant, mock_setup_entry: AsyncMock, + mock_auth: AsyncMock, mock_pydrawise: AsyncMock, user: User, ) -> None: @@ -46,11 +47,12 @@ async def test_form( CONF_PASSWORD: "__password__", } assert len(mock_setup_entry.mock_calls) == 1 - mock_pydrawise.get_user.assert_called_once_with(fetch_zones=False) + mock_auth.token.assert_awaited_once_with() + mock_pydrawise.get_user.assert_awaited_once_with(fetch_zones=False) async def test_form_api_error( - hass: HomeAssistant, mock_pydrawise: AsyncMock, user: User + hass: HomeAssistant, mock_auth: AsyncMock, mock_pydrawise: AsyncMock, user: User ) -> None: """Test we handle API errors.""" mock_pydrawise.get_user.side_effect = ClientError("XXX") @@ -71,8 +73,29 @@ async def test_form_api_error( assert result2["type"] is FlowResultType.CREATE_ENTRY -async def test_form_connect_timeout( - hass: HomeAssistant, mock_pydrawise: AsyncMock, user: User +async def test_form_auth_connect_timeout( + hass: HomeAssistant, mock_auth: AsyncMock, mock_pydrawise: AsyncMock +) -> 
None: + """Test we handle API errors.""" + mock_auth.token.side_effect = TimeoutError + init_result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + data = {CONF_USERNAME: "asdf@asdf.com", CONF_PASSWORD: "__password__"} + result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], data + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "timeout_connect"} + + mock_auth.token.reset_mock(side_effect=True) + result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data) + assert result2["type"] is FlowResultType.CREATE_ENTRY + + +async def test_form_client_connect_timeout( + hass: HomeAssistant, mock_auth: AsyncMock, mock_pydrawise: AsyncMock, user: User ) -> None: """Test we handle API errors.""" mock_pydrawise.get_user.side_effect = TimeoutError @@ -94,10 +117,10 @@ async def test_form_connect_timeout( async def test_form_not_authorized_error( - hass: HomeAssistant, mock_pydrawise: AsyncMock, user: User + hass: HomeAssistant, mock_auth: AsyncMock, mock_pydrawise: AsyncMock ) -> None: """Test we handle API errors.""" - mock_pydrawise.get_user.side_effect = NotAuthorizedError + mock_auth.token.side_effect = NotAuthorizedError init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -109,8 +132,7 @@ async def test_form_not_authorized_error( assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "invalid_auth"} - mock_pydrawise.get_user.reset_mock(side_effect=True) - mock_pydrawise.get_user.return_value = user + mock_auth.token.reset_mock(side_effect=True) result2 = await hass.config_entries.flow.async_configure(result["flow_id"], data) assert result2["type"] is FlowResultType.CREATE_ENTRY @@ -118,6 +140,7 @@ async def test_form_not_authorized_error( async def test_reauth( hass: HomeAssistant, user: User, + mock_auth: AsyncMock, mock_pydrawise: 
AsyncMock, ) -> None: """Test that re-authorization works.""" From fc34c6181c620c080757a20c00e4d6771848af4c Mon Sep 17 00:00:00 2001 From: David Knowles Date: Tue, 10 Dec 2024 08:23:14 -0500 Subject: [PATCH 043/677] Pass an application identifier to the Hydrawise API (#132779) --- homeassistant/components/hydrawise/__init__.py | 5 +++-- homeassistant/components/hydrawise/config_flow.py | 4 ++-- homeassistant/components/hydrawise/const.py | 4 ++++ 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/hydrawise/__init__.py b/homeassistant/components/hydrawise/__init__.py index 9e402cd4932..ea5a5801e69 100644 --- a/homeassistant/components/hydrawise/__init__.py +++ b/homeassistant/components/hydrawise/__init__.py @@ -7,7 +7,7 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed -from .const import DOMAIN +from .const import APP_ID, DOMAIN from .coordinator import ( HydrawiseMainDataUpdateCoordinator, HydrawiseUpdateCoordinators, @@ -30,7 +30,8 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b raise ConfigEntryAuthFailed hydrawise = client.Hydrawise( - auth.Auth(config_entry.data[CONF_USERNAME], config_entry.data[CONF_PASSWORD]) + auth.Auth(config_entry.data[CONF_USERNAME], config_entry.data[CONF_PASSWORD]), + app_id=APP_ID, ) main_coordinator = HydrawiseMainDataUpdateCoordinator(hass, hydrawise) diff --git a/homeassistant/components/hydrawise/config_flow.py b/homeassistant/components/hydrawise/config_flow.py index 419927d6d42..5af32af3951 100644 --- a/homeassistant/components/hydrawise/config_flow.py +++ b/homeassistant/components/hydrawise/config_flow.py @@ -13,7 +13,7 @@ import voluptuous as vol from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME -from .const import DOMAIN, LOGGER 
+from .const import APP_ID, DOMAIN, LOGGER class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): @@ -39,7 +39,7 @@ class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): return on_failure("timeout_connect") try: - api = client.Hydrawise(auth) + api = client.Hydrawise(auth, app_id=APP_ID) # Don't fetch zones because we don't need them yet. user = await api.get_user(fetch_zones=False) except TimeoutError: diff --git a/homeassistant/components/hydrawise/const.py b/homeassistant/components/hydrawise/const.py index 6d846dd6127..beaf450a586 100644 --- a/homeassistant/components/hydrawise/const.py +++ b/homeassistant/components/hydrawise/const.py @@ -3,8 +3,12 @@ from datetime import timedelta import logging +from homeassistant.const import __version__ as HA_VERSION + LOGGER = logging.getLogger(__package__) +APP_ID = f"homeassistant-{HA_VERSION}" + DOMAIN = "hydrawise" DEFAULT_WATERING_TIME = timedelta(minutes=15) From 01a9a5832700fa63ec88689f0d150760273dbe66 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Tue, 10 Dec 2024 13:31:22 +0100 Subject: [PATCH 044/677] Bump deebot-client to 9.3.0 (#132834) --- homeassistant/components/ecovacs/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index ad154b8f284..b9315e0c1c6 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==9.2.0"] + "requirements": ["py-sucks==0.9.10", "deebot-client==9.3.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index b7fa39280da..b167c45bc41 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -738,7 +738,7 @@ 
debugpy==1.8.8 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==9.2.0 +deebot-client==9.3.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f4c4a06f2e7..f0bfe821780 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -628,7 +628,7 @@ dbus-fast==2.24.3 debugpy==1.8.8 # homeassistant.components.ecovacs -deebot-client==9.2.0 +deebot-client==9.3.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns From 5a5bb139fa8c33f78bb4953f1125701b92c7330c Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Tue, 10 Dec 2024 14:22:49 +0100 Subject: [PATCH 045/677] Bump aioacaia to 0.1.11 (#132838) --- homeassistant/components/acaia/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/acaia/manifest.json b/homeassistant/components/acaia/manifest.json index 3f3e1c14d58..c1f1fdd7a81 100644 --- a/homeassistant/components/acaia/manifest.json +++ b/homeassistant/components/acaia/manifest.json @@ -25,5 +25,5 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["aioacaia"], - "requirements": ["aioacaia==0.1.10"] + "requirements": ["aioacaia==0.1.11"] } diff --git a/requirements_all.txt b/requirements_all.txt index b167c45bc41..a8a7185a22a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -173,7 +173,7 @@ aio-geojson-usgs-earthquakes==0.3 aio-georss-gdacs==0.10 # homeassistant.components.acaia -aioacaia==0.1.10 +aioacaia==0.1.11 # homeassistant.components.airq aioairq==0.4.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f0bfe821780..adf1c83b236 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -161,7 +161,7 @@ aio-geojson-usgs-earthquakes==0.3 aio-georss-gdacs==0.10 # homeassistant.components.acaia -aioacaia==0.1.10 +aioacaia==0.1.11 # 
homeassistant.components.airq aioairq==0.4.3 From 238cf691a4fc7c483bb1bfb81bb17b09154a7ed3 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 10 Dec 2024 15:07:18 +0100 Subject: [PATCH 046/677] Bump version to 2024.12.2 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index ce9fcf45b76..412b4b2eb19 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -25,7 +25,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 12 -PATCH_VERSION: Final = "1" +PATCH_VERSION: Final = "2" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index f4ae0f39ded..56347fbd31b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.12.1" +version = "2024.12.2" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From 1b300a438931cd080c2d8bbf40d0bef74fd5e933 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Tue, 10 Dec 2024 20:52:39 +0100 Subject: [PATCH 047/677] Set config-flow rule in IQS to todo in Bring integration (#132855) Set config-flow rule in IQS to todo --- homeassistant/components/bring/quality_scale.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/bring/quality_scale.yaml b/homeassistant/components/bring/quality_scale.yaml index 922306930f2..1fdb3f13f1b 100644 --- a/homeassistant/components/bring/quality_scale.yaml +++ b/homeassistant/components/bring/quality_scale.yaml @@ -7,7 +7,7 @@ rules: brands: done common-modules: done config-flow-test-coverage: done - config-flow: done + config-flow: todo dependency-transparency: done docs-actions: done docs-high-level-description: todo From fb3ffaf18ded9c80a7e3e32d19c030788b745dcd Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 10 Dec 2024 20:59:12 +0100 Subject: [PATCH 048/677] Migrate demo lights to use Kelvin (#132837) * Migrate demo lights to use Kelvin * Adjust google_assistant tests --- homeassistant/components/demo/light.py | 12 ++++++------ tests/components/google_assistant/test_smart_home.py | 4 ++-- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/demo/light.py b/homeassistant/components/demo/light.py index c859fef3b76..8bb4e403c3d 100644 --- a/homeassistant/components/demo/light.py +++ b/homeassistant/components/demo/light.py @@ -7,7 +7,7 @@ from typing import Any from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_RGBW_COLOR, @@ -28,7 +28,7 @@ LIGHT_COLORS = [(56, 86), (345, 75)] LIGHT_EFFECT_LIST = ["rainbow", "none"] -LIGHT_TEMPS = [240, 380] +LIGHT_TEMPS = [4166, 2631] SUPPORT_DEMO = {ColorMode.HS, ColorMode.COLOR_TEMP} 
SUPPORT_DEMO_HS_WHITE = {ColorMode.HS, ColorMode.WHITE} @@ -185,8 +185,8 @@ class DemoLight(LightEntity): return self._rgbww_color @property - def color_temp(self) -> int: - """Return the CT color temperature.""" + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" return self._ct @property @@ -216,9 +216,9 @@ class DemoLight(LightEntity): if ATTR_BRIGHTNESS in kwargs: self._brightness = kwargs[ATTR_BRIGHTNESS] - if ATTR_COLOR_TEMP in kwargs: + if ATTR_COLOR_TEMP_KELVIN in kwargs: self._color_mode = ColorMode.COLOR_TEMP - self._ct = kwargs[ATTR_COLOR_TEMP] + self._ct = kwargs[ATTR_COLOR_TEMP_KELVIN] if ATTR_EFFECT in kwargs: self._effect = kwargs[ATTR_EFFECT] diff --git a/tests/components/google_assistant/test_smart_home.py b/tests/components/google_assistant/test_smart_home.py index f1b7108c348..c5e17155067 100644 --- a/tests/components/google_assistant/test_smart_home.py +++ b/tests/components/google_assistant/test_smart_home.py @@ -402,7 +402,7 @@ async def test_query_message(hass: HomeAssistant) -> None: light.async_write_ha_state() light2 = DemoLight( - None, "Another Light", state=True, hs_color=(180, 75), ct=400, brightness=78 + None, "Another Light", state=True, hs_color=(180, 75), ct=2500, brightness=78 ) light2.hass = hass light2.entity_id = "light.another_light" @@ -410,7 +410,7 @@ async def test_query_message(hass: HomeAssistant) -> None: light2._attr_name = "Another Light" light2.async_write_ha_state() - light3 = DemoLight(None, "Color temp Light", state=True, ct=400, brightness=200) + light3 = DemoLight(None, "Color temp Light", state=True, ct=2500, brightness=200) light3.hass = hass light3.entity_id = "light.color_temp_light" light3._attr_device_info = None From b46392041f36cc932d0a12eb43af20ecfb7f25db Mon Sep 17 00:00:00 2001 From: Jonas Fors Lellky Date: Tue, 10 Dec 2024 21:44:00 +0100 Subject: [PATCH 049/677] Add model_id to flexit (bacnet) entity (#132875) * Add model_id to flexit (bacnet) entity * 
Add model to mock --- homeassistant/components/flexit_bacnet/entity.py | 1 + tests/components/flexit_bacnet/conftest.py | 1 + 2 files changed, 2 insertions(+) diff --git a/homeassistant/components/flexit_bacnet/entity.py b/homeassistant/components/flexit_bacnet/entity.py index bd92550db19..38efa838c93 100644 --- a/homeassistant/components/flexit_bacnet/entity.py +++ b/homeassistant/components/flexit_bacnet/entity.py @@ -26,6 +26,7 @@ class FlexitEntity(CoordinatorEntity[FlexitCoordinator]): name=coordinator.device.device_name, manufacturer="Flexit", model="Nordic", + model_id=coordinator.device.model, serial_number=coordinator.device.serial_number, ) diff --git a/tests/components/flexit_bacnet/conftest.py b/tests/components/flexit_bacnet/conftest.py index cc7c9fa0570..a6205bac506 100644 --- a/tests/components/flexit_bacnet/conftest.py +++ b/tests/components/flexit_bacnet/conftest.py @@ -44,6 +44,7 @@ def mock_flexit_bacnet() -> Generator[AsyncMock]: ): flexit_bacnet.serial_number = "0000-0001" flexit_bacnet.device_name = "Device Name" + flexit_bacnet.model = "S4 RER" flexit_bacnet.room_temperature = 19.0 flexit_bacnet.air_temp_setpoint_away = 18.0 flexit_bacnet.air_temp_setpoint_home = 22.0 From 77debcbe8b2c46c85d147ce21274159a2a44803c Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Tue, 10 Dec 2024 22:28:30 +0100 Subject: [PATCH 050/677] Update numpy to 2.2.0 (#132874) --- homeassistant/components/compensation/manifest.json | 2 +- homeassistant/components/iqvia/manifest.json | 2 +- homeassistant/components/stream/manifest.json | 2 +- homeassistant/components/tensorflow/manifest.json | 2 +- homeassistant/components/trend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- script/gen_requirements_all.py | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/compensation/manifest.json 
b/homeassistant/components/compensation/manifest.json index 5b3cc5ac2ac..ac82938b97b 100644 --- a/homeassistant/components/compensation/manifest.json +++ b/homeassistant/components/compensation/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/compensation", "iot_class": "calculated", "quality_scale": "legacy", - "requirements": ["numpy==2.1.3"] + "requirements": ["numpy==2.2.0"] } diff --git a/homeassistant/components/iqvia/manifest.json b/homeassistant/components/iqvia/manifest.json index 11c99a7428f..0236b72c89d 100644 --- a/homeassistant/components/iqvia/manifest.json +++ b/homeassistant/components/iqvia/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["pyiqvia"], - "requirements": ["numpy==2.1.3", "pyiqvia==2022.04.0"] + "requirements": ["numpy==2.2.0", "pyiqvia==2022.04.0"] } diff --git a/homeassistant/components/stream/manifest.json b/homeassistant/components/stream/manifest.json index fdf81d99e65..b9368565e2f 100644 --- a/homeassistant/components/stream/manifest.json +++ b/homeassistant/components/stream/manifest.json @@ -7,5 +7,5 @@ "integration_type": "system", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["PyTurboJPEG==1.7.5", "av==13.1.0", "numpy==2.1.3"] + "requirements": ["PyTurboJPEG==1.7.5", "av==13.1.0", "numpy==2.2.0"] } diff --git a/homeassistant/components/tensorflow/manifest.json b/homeassistant/components/tensorflow/manifest.json index 1ddfa188c0a..16de386b15d 100644 --- a/homeassistant/components/tensorflow/manifest.json +++ b/homeassistant/components/tensorflow/manifest.json @@ -10,7 +10,7 @@ "tensorflow==2.5.0", "tf-models-official==2.5.0", "pycocotools==2.0.6", - "numpy==2.1.3", + "numpy==2.2.0", "Pillow==11.0.0" ] } diff --git a/homeassistant/components/trend/manifest.json b/homeassistant/components/trend/manifest.json index d7981105fd2..85012939fc1 100644 --- a/homeassistant/components/trend/manifest.json +++ 
b/homeassistant/components/trend/manifest.json @@ -7,5 +7,5 @@ "integration_type": "helper", "iot_class": "calculated", "quality_scale": "internal", - "requirements": ["numpy==2.1.3"] + "requirements": ["numpy==2.2.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 932c7439336..726dad56ccb 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -115,7 +115,7 @@ httpcore==1.0.5 hyperframe>=5.2.0 # Ensure we run compatible with musllinux build env -numpy==2.1.3 +numpy==2.2.0 pandas~=2.2.3 # Constrain multidict to avoid typing issues diff --git a/requirements_all.txt b/requirements_all.txt index ff8950eb65c..872a2123a9c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1494,7 +1494,7 @@ numato-gpio==0.13.0 # homeassistant.components.stream # homeassistant.components.tensorflow # homeassistant.components.trend -numpy==2.1.3 +numpy==2.2.0 # homeassistant.components.nyt_games nyt_games==0.4.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 536b67e393b..5b428194aa2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1245,7 +1245,7 @@ numato-gpio==0.13.0 # homeassistant.components.stream # homeassistant.components.tensorflow # homeassistant.components.trend -numpy==2.1.3 +numpy==2.2.0 # homeassistant.components.nyt_games nyt_games==0.4.4 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 648798f79c8..fa46710d100 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -148,7 +148,7 @@ httpcore==1.0.5 hyperframe>=5.2.0 # Ensure we run compatible with musllinux build env -numpy==2.1.3 +numpy==2.2.0 pandas~=2.2.3 # Constrain multidict to avoid typing issues From 355e80aa56cf087f7b5b545e4209b2cb718eea87 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Tue, 10 Dec 2024 19:01:50 -0800 Subject: [PATCH 051/677] Test the google tasks api connection in setup (#132657) 
Improve google tasks setup --- .../components/google_tasks/__init__.py | 25 +++++--- homeassistant/components/google_tasks/todo.py | 14 ++--- .../components/google_tasks/types.py | 19 ++++++ tests/components/google_tasks/conftest.py | 40 +++++++++++- tests/components/google_tasks/test_init.py | 28 +++++++++ tests/components/google_tasks/test_todo.py | 62 ++----------------- 6 files changed, 115 insertions(+), 73 deletions(-) create mode 100644 homeassistant/components/google_tasks/types.py diff --git a/homeassistant/components/google_tasks/__init__.py b/homeassistant/components/google_tasks/__init__.py index 29a1b20f2bc..2ff22068ca9 100644 --- a/homeassistant/components/google_tasks/__init__.py +++ b/homeassistant/components/google_tasks/__init__.py @@ -4,7 +4,6 @@ from __future__ import annotations from aiohttp import ClientError, ClientResponseError -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady @@ -12,11 +11,17 @@ from homeassistant.helpers import config_entry_oauth2_flow from . 
import api from .const import DOMAIN +from .exceptions import GoogleTasksApiError +from .types import GoogleTasksConfigEntry, GoogleTasksData + +__all__ = [ + "DOMAIN", +] PLATFORMS: list[Platform] = [Platform.TODO] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: GoogleTasksConfigEntry) -> bool: """Set up Google Tasks from a config entry.""" implementation = ( await config_entry_oauth2_flow.async_get_config_entry_implementation( @@ -36,16 +41,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except ClientError as err: raise ConfigEntryNotReady from err - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = auth + try: + task_lists = await auth.list_task_lists() + except GoogleTasksApiError as err: + raise ConfigEntryNotReady from err + + entry.runtime_data = GoogleTasksData(auth, task_lists) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: GoogleTasksConfigEntry +) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/google_tasks/todo.py b/homeassistant/components/google_tasks/todo.py index 86cb5e09300..d749adbfb2b 100644 --- a/homeassistant/components/google_tasks/todo.py +++ b/homeassistant/components/google_tasks/todo.py @@ -11,15 +11,13 @@ from homeassistant.components.todo import ( TodoListEntity, TodoListEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from 
homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util import dt as dt_util -from .api import AsyncConfigEntryAuth -from .const import DOMAIN from .coordinator import TaskUpdateCoordinator +from .types import GoogleTasksConfigEntry SCAN_INTERVAL = timedelta(minutes=15) @@ -69,20 +67,20 @@ def _convert_api_item(item: dict[str, str]) -> TodoItem: async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: GoogleTasksConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Google Tasks todo platform.""" - api: AsyncConfigEntryAuth = hass.data[DOMAIN][entry.entry_id] - task_lists = await api.list_task_lists() async_add_entities( ( GoogleTaskTodoListEntity( - TaskUpdateCoordinator(hass, api, task_list["id"]), + TaskUpdateCoordinator(hass, entry.runtime_data.api, task_list["id"]), task_list["title"], entry.entry_id, task_list["id"], ) - for task_list in task_lists + for task_list in entry.runtime_data.task_lists ), True, ) diff --git a/homeassistant/components/google_tasks/types.py b/homeassistant/components/google_tasks/types.py new file mode 100644 index 00000000000..eaaec23ddf5 --- /dev/null +++ b/homeassistant/components/google_tasks/types.py @@ -0,0 +1,19 @@ +"""Types for the Google Tasks integration.""" + +from dataclasses import dataclass +from typing import Any + +from homeassistant.config_entries import ConfigEntry + +from .api import AsyncConfigEntryAuth + + +@dataclass +class GoogleTasksData: + """Class to hold Google Tasks data.""" + + api: AsyncConfigEntryAuth + task_lists: list[dict[str, Any]] + + +type GoogleTasksConfigEntry = ConfigEntry[GoogleTasksData] diff --git a/tests/components/google_tasks/conftest.py b/tests/components/google_tasks/conftest.py index 7db78af6232..e519cac9bdc 100644 --- a/tests/components/google_tasks/conftest.py +++ b/tests/components/google_tasks/conftest.py @@ -1,10 +1,12 @@ """Test 
fixtures for Google Tasks.""" from collections.abc import Awaitable, Callable +import json import time from typing import Any -from unittest.mock import patch +from unittest.mock import Mock, patch +from httplib2 import Response import pytest from homeassistant.components.application_credentials import ( @@ -24,6 +26,14 @@ FAKE_ACCESS_TOKEN = "some-access-token" FAKE_REFRESH_TOKEN = "some-refresh-token" FAKE_AUTH_IMPL = "conftest-imported-cred" +TASK_LIST = { + "id": "task-list-id-1", + "title": "My tasks", +} +LIST_TASK_LIST_RESPONSE = { + "items": [TASK_LIST], +} + @pytest.fixture def platforms() -> list[Platform]: @@ -89,3 +99,31 @@ async def mock_integration_setup( return result return run + + +@pytest.fixture(name="api_responses") +def mock_api_responses() -> list[dict | list]: + """Fixture forcreate_response_object API responses to return during test.""" + return [] + + +def create_response_object(api_response: dict | list) -> tuple[Response, bytes]: + """Create an http response.""" + return ( + Response({"Content-Type": "application/json"}), + json.dumps(api_response).encode(), + ) + + +@pytest.fixture(name="response_handler") +def mock_response_handler(api_responses: list[dict | list]) -> list: + """Create a mock http2lib response handler.""" + return [create_response_object(api_response) for api_response in api_responses] + + +@pytest.fixture +def mock_http_response(response_handler: list | Callable) -> Mock: + """Fixture to fake out http2lib responses.""" + + with patch("httplib2.Http.request", side_effect=response_handler) as mock_response: + yield mock_response diff --git a/tests/components/google_tasks/test_init.py b/tests/components/google_tasks/test_init.py index 1fe0e4a0c36..4bb2bd1eed7 100644 --- a/tests/components/google_tasks/test_init.py +++ b/tests/components/google_tasks/test_init.py @@ -2,8 +2,11 @@ from collections.abc import Awaitable, Callable import http +from http import HTTPStatus import time +from unittest.mock import Mock +from 
httplib2 import Response import pytest from homeassistant.components.google_tasks import DOMAIN @@ -11,15 +14,19 @@ from homeassistant.components.google_tasks.const import OAUTH2_TOKEN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from .conftest import LIST_TASK_LIST_RESPONSE + from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker +@pytest.mark.parametrize("api_responses", [[LIST_TASK_LIST_RESPONSE]]) async def test_setup( hass: HomeAssistant, integration_setup: Callable[[], Awaitable[bool]], config_entry: MockConfigEntry, setup_credentials: None, + mock_http_response: Mock, ) -> None: """Test successful setup and unload.""" assert config_entry.state is ConfigEntryState.NOT_LOADED @@ -35,12 +42,14 @@ async def test_setup( @pytest.mark.parametrize("expires_at", [time.time() - 3600], ids=["expired"]) +@pytest.mark.parametrize("api_responses", [[LIST_TASK_LIST_RESPONSE]]) async def test_expired_token_refresh_success( hass: HomeAssistant, integration_setup: Callable[[], Awaitable[bool]], aioclient_mock: AiohttpClientMocker, config_entry: MockConfigEntry, setup_credentials: None, + mock_http_response: Mock, ) -> None: """Test expired token is refreshed.""" @@ -98,3 +107,22 @@ async def test_expired_token_refresh_failure( await integration_setup() assert config_entry.state is expected_state + + +@pytest.mark.parametrize( + "response_handler", + [ + ([(Response({"status": HTTPStatus.INTERNAL_SERVER_ERROR}), b"")]), + ], +) +async def test_setup_error( + hass: HomeAssistant, + setup_credentials: None, + integration_setup: Callable[[], Awaitable[bool]], + mock_http_response: Mock, + config_entry: MockConfigEntry, +) -> None: + """Test an error returned by the server when setting up the platform.""" + + assert not await integration_setup() + assert config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/google_tasks/test_todo.py 
b/tests/components/google_tasks/test_todo.py index c5ecc0ca2cf..c713b9fd44f 100644 --- a/tests/components/google_tasks/test_todo.py +++ b/tests/components/google_tasks/test_todo.py @@ -4,7 +4,7 @@ from collections.abc import Awaitable, Callable from http import HTTPStatus import json from typing import Any -from unittest.mock import Mock, patch +from unittest.mock import Mock from httplib2 import Response import pytest @@ -23,16 +23,11 @@ from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from .conftest import LIST_TASK_LIST_RESPONSE, create_response_object + from tests.typing import WebSocketGenerator ENTITY_ID = "todo.my_tasks" -ITEM = { - "id": "task-list-id-1", - "title": "My tasks", -} -LIST_TASK_LIST_RESPONSE = { - "items": [ITEM], -} EMPTY_RESPONSE = {} LIST_TASKS_RESPONSE = { "items": [], @@ -149,20 +144,6 @@ async def ws_get_items( return get -@pytest.fixture(name="api_responses") -def mock_api_responses() -> list[dict | list]: - """Fixture for API responses to return during test.""" - return [] - - -def create_response_object(api_response: dict | list) -> tuple[Response, bytes]: - """Create an http response.""" - return ( - Response({"Content-Type": "application/json"}), - json.dumps(api_response).encode(), - ) - - def create_batch_response_object( content_ids: list[str], api_responses: list[dict | list | Response | None] ) -> tuple[Response, bytes]: @@ -225,18 +206,10 @@ def create_batch_response_handler( return _handler -@pytest.fixture(name="response_handler") -def mock_response_handler(api_responses: list[dict | list]) -> list: - """Create a mock http2lib response handler.""" - return [create_response_object(api_response) for api_response in api_responses] - - @pytest.fixture(autouse=True) -def mock_http_response(response_handler: list | Callable) -> Mock: - """Fixture to fake out http2lib responses.""" - - with patch("httplib2.Http.request", 
side_effect=response_handler) as mock_response: - yield mock_response +def setup_http_response(mock_http_response: Mock) -> None: + """Fixture to load the http response mock.""" + return @pytest.mark.parametrize("timezone", ["America/Regina", "UTC", "Asia/Tokyo"]) @@ -303,29 +276,6 @@ async def test_get_items( assert state.state == "1" -@pytest.mark.parametrize( - "response_handler", - [ - ([(Response({"status": HTTPStatus.INTERNAL_SERVER_ERROR}), b"")]), - ], -) -async def test_list_items_server_error( - hass: HomeAssistant, - setup_credentials: None, - integration_setup: Callable[[], Awaitable[bool]], - hass_ws_client: WebSocketGenerator, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], -) -> None: - """Test an error returned by the server when setting up the platform.""" - - assert await integration_setup() - - await hass_ws_client(hass) - - state = hass.states.get("todo.my_tasks") - assert state is None - - @pytest.mark.parametrize( "api_responses", [ From 73feeacc396021d05b6611dad93bb442dfa55cc0 Mon Sep 17 00:00:00 2001 From: Chris Talkington Date: Tue, 10 Dec 2024 23:55:58 -0600 Subject: [PATCH 052/677] Use runtime_data for roku (#132781) * use runtime_data for roku * unload cleanup * tweaks * tweaks * fix tests * fix tests * Update config_flow.py * Update config_flow.py --- homeassistant/components/roku/__init__.py | 16 ++++++++-------- homeassistant/components/roku/binary_sensor.py | 9 +++------ homeassistant/components/roku/config_flow.py | 10 +++------- homeassistant/components/roku/diagnostics.py | 14 +++++--------- homeassistant/components/roku/media_player.py | 9 +++------ homeassistant/components/roku/remote.py | 10 +++------- homeassistant/components/roku/select.py | 13 +++++-------- homeassistant/components/roku/sensor.py | 10 +++------- tests/components/roku/test_init.py | 9 +-------- 9 files changed, 34 insertions(+), 66 deletions(-) diff --git a/homeassistant/components/roku/__init__.py b/homeassistant/components/roku/__init__.py index 
b318a91e4c7..e6b92d91335 100644 --- a/homeassistant/components/roku/__init__.py +++ b/homeassistant/components/roku/__init__.py @@ -6,7 +6,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant -from .const import CONF_PLAY_MEDIA_APP_ID, DEFAULT_PLAY_MEDIA_APP_ID, DOMAIN +from .const import CONF_PLAY_MEDIA_APP_ID, DEFAULT_PLAY_MEDIA_APP_ID from .coordinator import RokuDataUpdateCoordinator PLATFORMS = [ @@ -17,8 +17,10 @@ PLATFORMS = [ Platform.SENSOR, ] +type RokuConfigEntry = ConfigEntry[RokuDataUpdateCoordinator] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +async def async_setup_entry(hass: HomeAssistant, entry: RokuConfigEntry) -> bool: """Set up Roku from a config entry.""" if (device_id := entry.unique_id) is None: device_id = entry.entry_id @@ -33,7 +35,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -42,13 +44,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: RokuConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) -async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def async_reload_entry(hass: HomeAssistant, entry: RokuConfigEntry) -> None: """Reload the config entry when it changed.""" await 
hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/roku/binary_sensor.py b/homeassistant/components/roku/binary_sensor.py index 0f5f29f63f6..cd51c30c250 100644 --- a/homeassistant/components/roku/binary_sensor.py +++ b/homeassistant/components/roku/binary_sensor.py @@ -11,12 +11,11 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import RokuConfigEntry from .entity import RokuEntity @@ -56,15 +55,13 @@ BINARY_SENSORS: tuple[RokuBinarySensorEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: RokuConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Roku binary sensors based on a config entry.""" - coordinator = hass.data[DOMAIN][entry.entry_id] - async_add_entities( RokuBinarySensorEntity( - coordinator=coordinator, + coordinator=entry.runtime_data, description=description, ) for description in BINARY_SENSORS diff --git a/homeassistant/components/roku/config_flow.py b/homeassistant/components/roku/config_flow.py index 18e3b3ed68a..b92ff819701 100644 --- a/homeassistant/components/roku/config_flow.py +++ b/homeassistant/components/roku/config_flow.py @@ -10,16 +10,12 @@ from rokuecp import Roku, RokuError import voluptuous as vol from homeassistant.components import ssdp, zeroconf -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import 
async_get_clientsession +from . import RokuConfigEntry from .const import CONF_PLAY_MEDIA_APP_ID, DEFAULT_PLAY_MEDIA_APP_ID, DOMAIN DATA_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str}) @@ -164,7 +160,7 @@ class RokuConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: RokuConfigEntry, ) -> RokuOptionsFlowHandler: """Create the options flow.""" return RokuOptionsFlowHandler() diff --git a/homeassistant/components/roku/diagnostics.py b/homeassistant/components/roku/diagnostics.py index 6c6809ee33a..e98837ca442 100644 --- a/homeassistant/components/roku/diagnostics.py +++ b/homeassistant/components/roku/diagnostics.py @@ -4,25 +4,21 @@ from __future__ import annotations from typing import Any -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import RokuDataUpdateCoordinator +from . import RokuConfigEntry async def async_get_config_entry_diagnostics( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, entry: RokuConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: RokuDataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id] - return { "entry": { "data": { - **config_entry.data, + **entry.data, }, - "unique_id": config_entry.unique_id, + "unique_id": entry.unique_id, }, - "data": coordinator.data.as_dict(), + "data": entry.runtime_data.data.as_dict(), } diff --git a/homeassistant/components/roku/media_player.py b/homeassistant/components/roku/media_player.py index 35f01553cdd..d43d62c9438 100644 --- a/homeassistant/components/roku/media_player.py +++ b/homeassistant/components/roku/media_player.py @@ -23,13 +23,13 @@ from homeassistant.components.media_player import ( async_process_play_media_url, ) from homeassistant.components.stream import FORMAT_CONTENT_TYPE, HLS_PROVIDER -from homeassistant.config_entries import 
ConfigEntry from homeassistant.const import ATTR_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import VolDictType +from . import RokuConfigEntry from .browse_media import async_browse_media from .const import ( ATTR_ARTIST_NAME, @@ -38,7 +38,6 @@ from .const import ( ATTR_KEYWORD, ATTR_MEDIA_TYPE, ATTR_THUMBNAIL, - DOMAIN, SERVICE_SEARCH, ) from .coordinator import RokuDataUpdateCoordinator @@ -83,15 +82,13 @@ SEARCH_SCHEMA: VolDictType = {vol.Required(ATTR_KEYWORD): str} async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: RokuConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the Roku config entry.""" - coordinator: RokuDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - async_add_entities( [ RokuMediaPlayer( - coordinator=coordinator, + coordinator=entry.runtime_data, ) ], True, diff --git a/homeassistant/components/roku/remote.py b/homeassistant/components/roku/remote.py index fa351e021e8..9a31f9fd7a0 100644 --- a/homeassistant/components/roku/remote.py +++ b/homeassistant/components/roku/remote.py @@ -6,28 +6,24 @@ from collections.abc import Iterable from typing import Any from homeassistant.components.remote import ATTR_NUM_REPEATS, RemoteEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import RokuDataUpdateCoordinator +from . 
import RokuConfigEntry from .entity import RokuEntity from .helpers import roku_exception_handler async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: RokuConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Load Roku remote based on a config entry.""" - coordinator: RokuDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - async_add_entities( [ RokuRemote( - coordinator=coordinator, + coordinator=entry.runtime_data, ) ], True, diff --git a/homeassistant/components/roku/select.py b/homeassistant/components/roku/select.py index 5f3b9d4049b..6977f8c0d24 100644 --- a/homeassistant/components/roku/select.py +++ b/homeassistant/components/roku/select.py @@ -9,12 +9,10 @@ from rokuecp import Roku from rokuecp.models import Device as RokuDevice from homeassistant.components.select import SelectEntity, SelectEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import RokuDataUpdateCoordinator +from . 
import RokuConfigEntry from .entity import RokuEntity from .helpers import format_channel_name, roku_exception_handler @@ -108,16 +106,15 @@ CHANNEL_ENTITY = RokuSelectEntityDescription( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: RokuConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Roku select based on a config entry.""" - coordinator: RokuDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - device: RokuDevice = coordinator.data + device: RokuDevice = entry.runtime_data.data entities: list[RokuSelectEntity] = [ RokuSelectEntity( - coordinator=coordinator, + coordinator=entry.runtime_data, description=description, ) for description in ENTITIES @@ -126,7 +123,7 @@ async def async_setup_entry( if len(device.channels) > 0: entities.append( RokuSelectEntity( - coordinator=coordinator, + coordinator=entry.runtime_data, description=CHANNEL_ENTITY, ) ) diff --git a/homeassistant/components/roku/sensor.py b/homeassistant/components/roku/sensor.py index ed134cc4c2a..56a84ead402 100644 --- a/homeassistant/components/roku/sensor.py +++ b/homeassistant/components/roku/sensor.py @@ -8,13 +8,11 @@ from dataclasses import dataclass from rokuecp.models import Device as RokuDevice from homeassistant.components.sensor import SensorEntity, SensorEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN -from .coordinator import RokuDataUpdateCoordinator +from . import RokuConfigEntry from .entity import RokuEntity @@ -43,15 +41,13 @@ SENSORS: tuple[RokuSensorEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: RokuConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Roku sensor based on a config entry.""" - coordinator: RokuDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - async_add_entities( RokuSensorEntity( - coordinator=coordinator, + coordinator=entry.runtime_data, description=description, ) for description in SENSORS diff --git a/tests/components/roku/test_init.py b/tests/components/roku/test_init.py index a4fc8477ac3..9c414bcf62a 100644 --- a/tests/components/roku/test_init.py +++ b/tests/components/roku/test_init.py @@ -4,7 +4,6 @@ from unittest.mock import AsyncMock, MagicMock, patch from rokuecp import RokuConnectionError -from homeassistant.components.roku.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -38,12 +37,7 @@ async def test_config_entry_no_unique_id( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - assert mock_config_entry.entry_id in hass.data[DOMAIN] assert mock_config_entry.state is ConfigEntryState.LOADED - assert ( - hass.data[DOMAIN][mock_config_entry.entry_id].device_id - == mock_config_entry.entry_id - ) async def test_load_unload_config_entry( @@ -56,10 +50,9 @@ async def test_load_unload_config_entry( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - assert mock_config_entry.entry_id in hass.data[DOMAIN] assert mock_config_entry.state is ConfigEntryState.LOADED await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() - assert mock_config_entry.entry_id not in hass.data[DOMAIN] + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED From 9f40074d6635d8917b2c87d4037e9ec4b686cc73 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ludovic=20BOU=C3=89?= Date: Wed, 11 Dec 2024 07:36:09 +0100 Subject: [PATCH 
053/677] Fix typo in water heater integration (#132891) Fix typo in water heater component --- homeassistant/components/water_heater/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/water_heater/__init__.py b/homeassistant/components/water_heater/__init__.py index 43a9364e59d..67ce3a97fd1 100644 --- a/homeassistant/components/water_heater/__init__.py +++ b/homeassistant/components/water_heater/__init__.py @@ -56,7 +56,7 @@ STATE_GAS = "gas" class WaterHeaterEntityFeature(IntFlag): - """Supported features of the fan entity.""" + """Supported features of the water heater entity.""" TARGET_TEMPERATURE = 1 OPERATION_MODE = 2 From f0f0b4b8fa2f1bb04385c9a076adb61d5cae32e4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 11 Dec 2024 08:24:25 +0100 Subject: [PATCH 054/677] Bump github/codeql-action from 3.27.6 to 3.27.7 (#132900) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/codeql.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 5b8ac94e570..8f6e393f853 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.2.2 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.27.6 + uses: github/codeql-action/init@v3.27.7 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.27.6 + uses: github/codeql-action/analyze@v3.27.7 with: category: "/language:python" From 4ff41ed2f800e1f04922278f04e498791c972eda Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 11 Dec 2024 08:42:48 +0100 Subject: [PATCH 055/677] Refactor light significant change to use kelvin attribute (#132853) --- homeassistant/components/light/significant_change.py | 10 +++++----- 
tests/components/light/test_significant_change.py | 6 +++--- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/light/significant_change.py b/homeassistant/components/light/significant_change.py index 1877c925622..773b7a6b898 100644 --- a/homeassistant/components/light/significant_change.py +++ b/homeassistant/components/light/significant_change.py @@ -7,7 +7,7 @@ from typing import Any from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.significant_change import check_absolute_change -from . import ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_HS_COLOR +from . import ATTR_BRIGHTNESS, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR @callback @@ -44,10 +44,10 @@ def async_check_significant_change( return True if check_absolute_change( - # Default range 153..500 - old_attrs.get(ATTR_COLOR_TEMP), - new_attrs.get(ATTR_COLOR_TEMP), - 5, + # Default range 2000..6500 + old_attrs.get(ATTR_COLOR_TEMP_KELVIN), + new_attrs.get(ATTR_COLOR_TEMP_KELVIN), + 50, ): return True diff --git a/tests/components/light/test_significant_change.py b/tests/components/light/test_significant_change.py index 87a60b58325..cf03f37228e 100644 --- a/tests/components/light/test_significant_change.py +++ b/tests/components/light/test_significant_change.py @@ -2,7 +2,7 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ) @@ -26,10 +26,10 @@ async def test_significant_change() -> None: # Color temp assert not async_check_significant_change( - None, "on", {ATTR_COLOR_TEMP: 60}, "on", {ATTR_COLOR_TEMP: 64} + None, "on", {ATTR_COLOR_TEMP_KELVIN: 2000}, "on", {ATTR_COLOR_TEMP_KELVIN: 2049} ) assert async_check_significant_change( - None, "on", {ATTR_COLOR_TEMP: 60}, "on", {ATTR_COLOR_TEMP: 65} + None, "on", {ATTR_COLOR_TEMP_KELVIN: 2000}, "on", {ATTR_COLOR_TEMP_KELVIN: 2050} ) # Effect From 5e1772156856c8c1114acdb3b1a1064a3925672f Mon Sep 17 
00:00:00 2001 From: shapournemati-iotty <130070037+shapournemati-iotty@users.noreply.github.com> Date: Wed, 11 Dec 2024 08:53:19 +0100 Subject: [PATCH 056/677] Remove old codeowner no longer working on the integration (#132807) --- CODEOWNERS | 4 ++-- homeassistant/components/iotty/manifest.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index 3a407308275..03b0e7b893b 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -727,8 +727,8 @@ build.json @home-assistant/supervisor /tests/components/ios/ @robbiet480 /homeassistant/components/iotawatt/ @gtdiehl @jyavenard /tests/components/iotawatt/ @gtdiehl @jyavenard -/homeassistant/components/iotty/ @pburgio @shapournemati-iotty -/tests/components/iotty/ @pburgio @shapournemati-iotty +/homeassistant/components/iotty/ @shapournemati-iotty +/tests/components/iotty/ @shapournemati-iotty /homeassistant/components/iperf3/ @rohankapoorcom /homeassistant/components/ipma/ @dgomes /tests/components/ipma/ @dgomes diff --git a/homeassistant/components/iotty/manifest.json b/homeassistant/components/iotty/manifest.json index 1c0d5cc3df2..db81f7c5839 100644 --- a/homeassistant/components/iotty/manifest.json +++ b/homeassistant/components/iotty/manifest.json @@ -1,7 +1,7 @@ { "domain": "iotty", "name": "iotty", - "codeowners": ["@pburgio", "@shapournemati-iotty"], + "codeowners": ["@shapournemati-iotty"], "config_flow": true, "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/iotty", From af838077ccad92ba77a9ecff0f3e6b1dcf180c5e Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Wed, 11 Dec 2024 08:55:00 +0100 Subject: [PATCH 057/677] Fix docker hassfest (#132823) --- .github/workflows/builder.yml | 2 +- script/gen_requirements_all.py | 1 - script/hassfest/__main__.py | 14 +++++----- script/hassfest/docker.py | 6 ++--- script/hassfest/docker/entrypoint.sh | 26 ++++++++++++++----- script/hassfest/model.py | 6 ++++- 
script/hassfest/quality_scale.py | 2 +- .../quality_scale_validation/__init__.py | 4 +-- .../config_entry_unloading.py | 6 +++-- .../quality_scale_validation/config_flow.py | 6 +++-- .../quality_scale_validation/diagnostics.py | 6 +++-- .../quality_scale_validation/discovery.py | 6 +++-- .../parallel_updates.py | 6 +++-- .../reauthentication_flow.py | 6 +++-- .../reconfiguration_flow.py | 6 +++-- .../quality_scale_validation/runtime_data.py | 6 +++-- .../quality_scale_validation/strict_typing.py | 13 ++++++---- .../unique_config_entry.py | 6 +++-- tests/hassfest/test_requirements.py | 3 +-- tests/hassfest/test_version.py | 3 +-- 20 files changed, 85 insertions(+), 49 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index 9d3ab18f7c1..8f419cca1da 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -517,7 +517,7 @@ jobs: tags: ${{ env.HASSFEST_IMAGE_TAG }} - name: Run hassfest against core - run: docker run --rm -v ${{ github.workspace }}/homeassistant:/github/workspace/homeassistant ${{ env.HASSFEST_IMAGE_TAG }} --core-integrations-path=/github/workspace/homeassistant/components + run: docker run --rm -v ${{ github.workspace }}:/github/workspace ${{ env.HASSFEST_IMAGE_TAG }} --core-path=/github/workspace - name: Push Docker image if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true' diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index fa46710d100..5cc609eec2a 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -628,7 +628,6 @@ def _get_hassfest_config() -> Config: specific_integrations=None, action="validate", requirements=True, - core_integrations_path=Path("homeassistant/components"), ) diff --git a/script/hassfest/__main__.py b/script/hassfest/__main__.py index 81670de5afd..c93d8fd4499 100644 --- a/script/hassfest/__main__.py +++ b/script/hassfest/__main__.py @@ -110,10 +110,10 @@ def get_config() -> Config: 
help="Comma-separate list of plugins to run. Valid plugin names: %(default)s", ) parser.add_argument( - "--core-integrations-path", + "--core-path", type=Path, - default=Path("homeassistant/components"), - help="Path to core integrations", + default=Path(), + help="Path to core", ) parsed = parser.parse_args() @@ -125,16 +125,18 @@ def get_config() -> Config: "Generate is not allowed when limiting to specific integrations" ) - if not parsed.integration_path and not Path("requirements_all.txt").is_file(): + if ( + not parsed.integration_path + and not (parsed.core_path / "requirements_all.txt").is_file() + ): raise RuntimeError("Run from Home Assistant root") return Config( - root=Path().absolute(), + root=parsed.core_path.absolute(), specific_integrations=parsed.integration_path, action=parsed.action, requirements=parsed.requirements, plugins=set(parsed.plugins), - core_integrations_path=parsed.core_integrations_path, ) diff --git a/script/hassfest/docker.py b/script/hassfest/docker.py index 57d86bc4def..022caee30cd 100644 --- a/script/hassfest/docker.py +++ b/script/hassfest/docker.py @@ -185,12 +185,12 @@ def _generate_files(config: Config) -> list[File]: + 10 ) * 1000 - package_versions = _get_package_versions(Path("requirements.txt"), {"uv"}) + package_versions = _get_package_versions(config.root / "requirements.txt", {"uv"}) package_versions |= _get_package_versions( - Path("requirements_test.txt"), {"pipdeptree", "tqdm"} + config.root / "requirements_test.txt", {"pipdeptree", "tqdm"} ) package_versions |= _get_package_versions( - Path("requirements_test_pre_commit.txt"), {"ruff"} + config.root / "requirements_test_pre_commit.txt", {"ruff"} ) return [ diff --git a/script/hassfest/docker/entrypoint.sh b/script/hassfest/docker/entrypoint.sh index 7b75eb186d2..eabc08a9499 100755 --- a/script/hassfest/docker/entrypoint.sh +++ b/script/hassfest/docker/entrypoint.sh @@ -2,16 +2,28 @@ integrations="" integration_path="" +core_path_provided=false -# Enable recursive 
globbing using find -for manifest in $(find . -name "manifest.json"); do - manifest_path=$(realpath "${manifest}") - integrations="$integrations --integration-path ${manifest_path%/*}" +for arg in "$@"; do + case "$arg" in + --core-path=*) + core_path_provided=true + break + ;; + esac done -if [ -z "$integrations" ]; then - echo "Error: No integrations found!" - exit 1 +if [ "$core_path_provided" = false ]; then + # Enable recursive globbing using find + for manifest in $(find . -name "manifest.json"); do + manifest_path=$(realpath "${manifest}") + integrations="$integrations --integration-path ${manifest_path%/*}" + done + + if [ -z "$integrations" ]; then + echo "Error: No integrations found!" + exit 1 + fi fi cd /usr/src/homeassistant || exit 1 diff --git a/script/hassfest/model.py b/script/hassfest/model.py index 377f82b0d5c..08ded687096 100644 --- a/script/hassfest/model.py +++ b/script/hassfest/model.py @@ -30,11 +30,15 @@ class Config: root: pathlib.Path action: Literal["validate", "generate"] requirements: bool - core_integrations_path: pathlib.Path + core_integrations_path: pathlib.Path = field(init=False) errors: list[Error] = field(default_factory=list) cache: dict[str, Any] = field(default_factory=dict) plugins: set[str] = field(default_factory=set) + def __post_init__(self) -> None: + """Post init.""" + self.core_integrations_path = self.root / "homeassistant/components" + def add_error(self, *args: Any, **kwargs: Any) -> None: """Add an error.""" self.errors.append(Error(*args, **kwargs)) diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 72f01f3d1d1..5a09f8c7bd8 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -1358,7 +1358,7 @@ def validate_iqs_file(config: Config, integration: Integration) -> None: for rule_name in rules_done: if (validator := VALIDATORS.get(rule_name)) and ( - errors := validator.validate(integration, rules_done=rules_done) + errors := 
validator.validate(config, integration, rules_done=rules_done) ): for error in errors: integration.add_error("quality_scale", f"[{rule_name}] {error}") diff --git a/script/hassfest/quality_scale_validation/__init__.py b/script/hassfest/quality_scale_validation/__init__.py index 892bb70fabd..7c41a58b601 100644 --- a/script/hassfest/quality_scale_validation/__init__.py +++ b/script/hassfest/quality_scale_validation/__init__.py @@ -2,14 +2,14 @@ from typing import Protocol -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration class RuleValidationProtocol(Protocol): """Protocol for rule validation.""" def validate( - self, integration: Integration, *, rules_done: set[str] + self, config: Config, integration: Integration, *, rules_done: set[str] ) -> list[str] | None: """Validate a quality scale rule. diff --git a/script/hassfest/quality_scale_validation/config_entry_unloading.py b/script/hassfest/quality_scale_validation/config_entry_unloading.py index fb636a7f2ed..4874ddc4625 100644 --- a/script/hassfest/quality_scale_validation/config_entry_unloading.py +++ b/script/hassfest/quality_scale_validation/config_entry_unloading.py @@ -6,7 +6,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/c import ast from script.hassfest import ast_parse_module -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration def _has_unload_entry_function(module: ast.Module) -> bool: @@ -17,7 +17,9 @@ def _has_unload_entry_function(module: ast.Module) -> bool: ) -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate that the integration has a config flow.""" init_file = integration.path / "__init__.py" diff --git a/script/hassfest/quality_scale_validation/config_flow.py 
b/script/hassfest/quality_scale_validation/config_flow.py index 6e88aa462f4..d1ac70ab469 100644 --- a/script/hassfest/quality_scale_validation/config_flow.py +++ b/script/hassfest/quality_scale_validation/config_flow.py @@ -3,10 +3,12 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/config-flow/ """ -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate that the integration implements config flow.""" if not integration.config_flow: diff --git a/script/hassfest/quality_scale_validation/diagnostics.py b/script/hassfest/quality_scale_validation/diagnostics.py index 44012208bcb..ea143002b09 100644 --- a/script/hassfest/quality_scale_validation/diagnostics.py +++ b/script/hassfest/quality_scale_validation/diagnostics.py @@ -6,7 +6,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/d import ast from script.hassfest import ast_parse_module -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration DIAGNOSTICS_FUNCTIONS = { "async_get_config_entry_diagnostics", @@ -22,7 +22,9 @@ def _has_diagnostics_function(module: ast.Module) -> bool: ) -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate that the integration implements diagnostics.""" diagnostics_file = integration.path / "diagnostics.py" diff --git a/script/hassfest/quality_scale_validation/discovery.py b/script/hassfest/quality_scale_validation/discovery.py index db50cdba55a..d11bcaf2cec 100644 --- a/script/hassfest/quality_scale_validation/discovery.py +++ b/script/hassfest/quality_scale_validation/discovery.py @@ 
-6,7 +6,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/d import ast from script.hassfest import ast_parse_module -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration MANIFEST_KEYS = [ "bluetooth", @@ -38,7 +38,9 @@ def _has_discovery_function(module: ast.Module) -> bool: ) -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate that the integration implements diagnostics.""" config_flow_file = integration.path / "config_flow.py" diff --git a/script/hassfest/quality_scale_validation/parallel_updates.py b/script/hassfest/quality_scale_validation/parallel_updates.py index 3483a44f504..00ad891774d 100644 --- a/script/hassfest/quality_scale_validation/parallel_updates.py +++ b/script/hassfest/quality_scale_validation/parallel_updates.py @@ -7,7 +7,7 @@ import ast from homeassistant.const import Platform from script.hassfest import ast_parse_module -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration def _has_parallel_updates_defined(module: ast.Module) -> bool: @@ -18,7 +18,9 @@ def _has_parallel_updates_defined(module: ast.Module) -> bool: ) -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate that the integration sets PARALLEL_UPDATES constant.""" errors = [] diff --git a/script/hassfest/quality_scale_validation/reauthentication_flow.py b/script/hassfest/quality_scale_validation/reauthentication_flow.py index 81d34ec4f7f..3db9700af98 100644 --- a/script/hassfest/quality_scale_validation/reauthentication_flow.py +++ b/script/hassfest/quality_scale_validation/reauthentication_flow.py @@ -6,7 +6,7 @@ 
https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/r import ast from script.hassfest import ast_parse_module -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration def _has_step_reauth_function(module: ast.Module) -> bool: @@ -17,7 +17,9 @@ def _has_step_reauth_function(module: ast.Module) -> bool: ) -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate that the integration has a reauthentication flow.""" config_flow_file = integration.path / "config_flow.py" diff --git a/script/hassfest/quality_scale_validation/reconfiguration_flow.py b/script/hassfest/quality_scale_validation/reconfiguration_flow.py index b27475e8c70..28cc0ef6d43 100644 --- a/script/hassfest/quality_scale_validation/reconfiguration_flow.py +++ b/script/hassfest/quality_scale_validation/reconfiguration_flow.py @@ -6,7 +6,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/r import ast from script.hassfest import ast_parse_module -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration def _has_step_reconfigure_function(module: ast.Module) -> bool: @@ -17,7 +17,9 @@ def _has_step_reconfigure_function(module: ast.Module) -> bool: ) -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate that the integration has a reconfiguration flow.""" config_flow_file = integration.path / "config_flow.py" diff --git a/script/hassfest/quality_scale_validation/runtime_data.py b/script/hassfest/quality_scale_validation/runtime_data.py index 8ad721a218c..cfc4c5224de 100644 --- a/script/hassfest/quality_scale_validation/runtime_data.py +++ 
b/script/hassfest/quality_scale_validation/runtime_data.py @@ -8,7 +8,7 @@ import re from homeassistant.const import Platform from script.hassfest import ast_parse_module -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration _ANNOTATION_MATCH = re.compile(r"^[A-Za-z]+ConfigEntry$") _FUNCTIONS: dict[str, dict[str, int]] = { @@ -102,7 +102,9 @@ def _check_typed_config_entry(integration: Integration) -> list[str]: return errors -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate correct use of ConfigEntry.runtime_data.""" init_file = integration.path / "__init__.py" init = ast_parse_module(init_file) diff --git a/script/hassfest/quality_scale_validation/strict_typing.py b/script/hassfest/quality_scale_validation/strict_typing.py index a7755b6bb40..a27ab752cf0 100644 --- a/script/hassfest/quality_scale_validation/strict_typing.py +++ b/script/hassfest/quality_scale_validation/strict_typing.py @@ -7,27 +7,30 @@ from functools import lru_cache from pathlib import Path import re -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration _STRICT_TYPING_FILE = Path(".strict-typing") _COMPONENT_REGEX = r"homeassistant.components.([^.]+).*" @lru_cache -def _strict_typing_components() -> set[str]: +def _strict_typing_components(strict_typing_file: Path) -> set[str]: return set( { match.group(1) - for line in _STRICT_TYPING_FILE.read_text(encoding="utf-8").splitlines() + for line in strict_typing_file.read_text(encoding="utf-8").splitlines() if (match := re.match(_COMPONENT_REGEX, line)) is not None } ) -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate that the integration has strict 
typing enabled.""" + strict_typing_file = config.root / _STRICT_TYPING_FILE - if integration.domain not in _strict_typing_components(): + if integration.domain not in _strict_typing_components(strict_typing_file): return [ "Integration does not have strict typing enabled " "(is missing from .strict-typing)" diff --git a/script/hassfest/quality_scale_validation/unique_config_entry.py b/script/hassfest/quality_scale_validation/unique_config_entry.py index 8c38923e584..83b3d20bd80 100644 --- a/script/hassfest/quality_scale_validation/unique_config_entry.py +++ b/script/hassfest/quality_scale_validation/unique_config_entry.py @@ -6,7 +6,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/u import ast from script.hassfest import ast_parse_module -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration def _has_method_call(module: ast.Module, name: str) -> bool: @@ -30,7 +30,9 @@ def _has_abort_unique_id_configured(module: ast.Module) -> bool: ) -def validate(integration: Integration, *, rules_done: set[str]) -> list[str] | None: +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: """Validate that the integration prevents duplicate devices.""" if integration.manifest.get("single_config_entry"): diff --git a/tests/hassfest/test_requirements.py b/tests/hassfest/test_requirements.py index e70bee104c9..b9259596c65 100644 --- a/tests/hassfest/test_requirements.py +++ b/tests/hassfest/test_requirements.py @@ -12,13 +12,12 @@ from script.hassfest.requirements import validate_requirements_format def integration(): """Fixture for hassfest integration model.""" return Integration( - path=Path("homeassistant/components/test"), + path=Path("homeassistant/components/test").absolute(), _config=Config( root=Path(".").absolute(), specific_integrations=None, action="validate", requirements=True, - core_integrations_path=Path("homeassistant/components"), ), 
_manifest={ "domain": "test", diff --git a/tests/hassfest/test_version.py b/tests/hassfest/test_version.py index 30677356101..20c3d93bda5 100644 --- a/tests/hassfest/test_version.py +++ b/tests/hassfest/test_version.py @@ -16,13 +16,12 @@ from script.hassfest.model import Config, Integration def integration(): """Fixture for hassfest integration model.""" integration = Integration( - "", + Path(), _config=Config( root=Path(".").absolute(), specific_integrations=None, action="validate", requirements=True, - core_integrations_path=Path("homeassistant/components"), ), ) integration._manifest = { From b780f31e63abbde7224bec6b2ab2cacc156516d0 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 11 Dec 2024 08:55:23 +0100 Subject: [PATCH 058/677] Migrate flux to use Kelvin over Mireds (#132839) --- homeassistant/components/flux/switch.py | 17 +++++++---------- tests/components/flux/test_switch.py | 4 ++-- 2 files changed, 9 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/flux/switch.py b/homeassistant/components/flux/switch.py index 8a3d7ec7260..f7cf5b2c03a 100644 --- a/homeassistant/components/flux/switch.py +++ b/homeassistant/components/flux/switch.py @@ -13,7 +13,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_TRANSITION, ATTR_XY_COLOR, @@ -43,7 +43,6 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import slugify from homeassistant.util.color import ( color_RGB_to_xy_brightness, - color_temperature_kelvin_to_mired, color_temperature_to_rgb, ) from homeassistant.util.dt import as_local, utcnow as dt_utcnow @@ -109,13 +108,13 @@ async def async_set_lights_xy(hass, lights, x_val, y_val, brightness, transition await hass.services.async_call(LIGHT_DOMAIN, SERVICE_TURN_ON, service_data) -async def async_set_lights_temp(hass, lights, mired, brightness, 
transition): +async def async_set_lights_temp(hass, lights, kelvin, brightness, transition): """Set color of array of lights.""" for light in lights: if is_on(hass, light): service_data = {ATTR_ENTITY_ID: light} - if mired is not None: - service_data[ATTR_COLOR_TEMP] = int(mired) + if kelvin is not None: + service_data[ATTR_COLOR_TEMP_KELVIN] = kelvin if brightness is not None: service_data[ATTR_BRIGHTNESS] = brightness if transition is not None: @@ -350,17 +349,15 @@ class FluxSwitch(SwitchEntity, RestoreEntity): now, ) else: - # Convert to mired and clamp to allowed values - mired = color_temperature_kelvin_to_mired(temp) await async_set_lights_temp( - self.hass, self._lights, mired, brightness, self._transition + self.hass, self._lights, int(temp), brightness, self._transition ) _LOGGER.debug( ( - "Lights updated to mired:%s brightness:%s, %s%% " + "Lights updated to kelvin:%s brightness:%s, %s%% " "of %s cycle complete at %s" ), - mired, + temp, brightness, round(percentage_complete * 100), time_state, diff --git a/tests/components/flux/test_switch.py b/tests/components/flux/test_switch.py index ab0e8a556c4..f7dc30db240 100644 --- a/tests/components/flux/test_switch.py +++ b/tests/components/flux/test_switch.py @@ -1164,7 +1164,7 @@ async def test_flux_with_multiple_lights( assert call.data[light.ATTR_XY_COLOR] == [0.46, 0.376] -async def test_flux_with_mired( +async def test_flux_with_temp( hass: HomeAssistant, mock_light_entities: list[MockLight], ) -> None: @@ -1224,7 +1224,7 @@ async def test_flux_with_mired( async_fire_time_changed(hass, test_time) await hass.async_block_till_done() call = turn_on_calls[-1] - assert call.data[light.ATTR_COLOR_TEMP] == 269 + assert call.data[light.ATTR_COLOR_TEMP_KELVIN] == 3708 async def test_flux_with_rgb( From 2bb05296b8fa46b8b67967d8186ee9c50977f9f9 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Wed, 11 Dec 2024 09:46:53 +0100 Subject: [PATCH 059/677] Add remaining test coverage to yale_smart_alarm (#132869) --- 
.../test_alarm_control_panel.py | 123 +++++++++++++++++- 1 file changed, 121 insertions(+), 2 deletions(-) diff --git a/tests/components/yale_smart_alarm/test_alarm_control_panel.py b/tests/components/yale_smart_alarm/test_alarm_control_panel.py index 4e8330df071..0280223b72a 100644 --- a/tests/components/yale_smart_alarm/test_alarm_control_panel.py +++ b/tests/components/yale_smart_alarm/test_alarm_control_panel.py @@ -2,16 +2,27 @@ from __future__ import annotations +from copy import deepcopy from unittest.mock import Mock +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion +from yalesmartalarmclient import YaleSmartAlarmData -from homeassistant.const import Platform +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_AWAY, + SERVICE_ALARM_ARM_HOME, + SERVICE_ALARM_DISARM, + AlarmControlPanelState, +) +from homeassistant.const import ATTR_CODE, ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform @pytest.mark.parametrize( @@ -27,3 +38,111 @@ async def test_alarm_control_panel( """Test the Yale Smart Alarm alarm_control_panel.""" entry = load_config_entry[0] await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + +@pytest.mark.parametrize( + "load_platforms", + [[Platform.ALARM_CONTROL_PANEL]], +) +async def test_alarm_control_panel_service_calls( + hass: HomeAssistant, + get_data: YaleSmartAlarmData, + load_config_entry: tuple[MockConfigEntry, Mock], +) -> None: + """Test the Yale Smart Alarm alarm_control_panel action calls.""" + + client = load_config_entry[1] + + data = deepcopy(get_data.cycle) + data["data"] = 
data["data"].pop("device_status") + + client.auth.get_authenticated = Mock(return_value=data) + client.disarm = Mock(return_value=True) + client.arm_partial = Mock(return_value=True) + client.arm_full = Mock(return_value=True) + + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_DISARM, + {ATTR_ENTITY_ID: "alarm_control_panel.test_username", ATTR_CODE: "123456"}, + blocking=True, + ) + client.disarm.assert_called_once() + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.DISARMED + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_HOME, + {ATTR_ENTITY_ID: "alarm_control_panel.test_username", ATTR_CODE: "123456"}, + blocking=True, + ) + client.arm_partial.assert_called_once() + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.ARMED_HOME + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_AWAY, + {ATTR_ENTITY_ID: "alarm_control_panel.test_username", ATTR_CODE: "123456"}, + blocking=True, + ) + client.arm_full.assert_called_once() + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.ARMED_AWAY + + client.disarm = Mock(side_effect=ConnectionError("no connection")) + + with pytest.raises( + HomeAssistantError, + match="Could not set alarm for test-username: no connection", + ): + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_DISARM, + {ATTR_ENTITY_ID: "alarm_control_panel.test_username", ATTR_CODE: "123456"}, + blocking=True, + ) + + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.ARMED_AWAY + + client.disarm = Mock(return_value=False) + + with pytest.raises( + HomeAssistantError, + match="Could not change alarm, check system ready for arming", + ): + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + 
SERVICE_ALARM_DISARM, + {ATTR_ENTITY_ID: "alarm_control_panel.test_username", ATTR_CODE: "123456"}, + blocking=True, + ) + + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.ARMED_AWAY + + +@pytest.mark.parametrize( + "load_platforms", + [[Platform.ALARM_CONTROL_PANEL]], +) +async def test_alarm_control_panel_not_available( + hass: HomeAssistant, + get_data: YaleSmartAlarmData, + load_config_entry: tuple[MockConfigEntry, Mock], + freezer: FrozenDateTimeFactory, +) -> None: + """Test the Yale Smart Alarm alarm_control_panel not being available.""" + + client = load_config_entry[1] + client.get_armed_status = Mock(return_value=None) + + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == AlarmControlPanelState.ARMED_AWAY + + freezer.tick(3600) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("alarm_control_panel.test_username") + assert state.state == STATE_UNAVAILABLE From 7ef3e92e2d4568ab07855ab8a2134733773ae69a Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 11 Dec 2024 09:57:29 +0100 Subject: [PATCH 060/677] Migrate tasmota lights to use Kelvin (#132798) --- homeassistant/components/tasmota/light.py | 38 ++++++++++++++++------- 1 file changed, 26 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/tasmota/light.py b/homeassistant/components/tasmota/light.py index 9b69ee60524..a06e77eceb1 100644 --- a/homeassistant/components/tasmota/light.py +++ b/homeassistant/components/tasmota/light.py @@ -18,7 +18,7 @@ from hatasmota.models import DiscoveryHashType from homeassistant.components import light from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_TRANSITION, @@ -32,6 +32,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core 
import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import color as color_util from .const import DATA_REMOVE_DISCOVER_COMPONENT from .discovery import TASMOTA_DISCOVERY_ENTITY_NEW @@ -199,19 +200,27 @@ class TasmotaLight( return self._color_mode @property - def color_temp(self) -> int | None: - """Return the color temperature in mired.""" - return self._color_temp + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" + return ( + color_util.color_temperature_mired_to_kelvin(self._color_temp) + if self._color_temp + else None + ) @property - def min_mireds(self) -> int: - """Return the coldest color_temp that this light supports.""" - return self._tasmota_entity.min_mireds + def max_color_temp_kelvin(self) -> int: + """Return the coldest color_temp_kelvin that this light supports.""" + return color_util.color_temperature_mired_to_kelvin( + self._tasmota_entity.min_mireds + ) @property - def max_mireds(self) -> int: - """Return the warmest color_temp that this light supports.""" - return self._tasmota_entity.max_mireds + def min_color_temp_kelvin(self) -> int: + """Return the warmest color_temp_kelvin that this light supports.""" + return color_util.color_temperature_mired_to_kelvin( + self._tasmota_entity.max_mireds + ) @property def effect(self) -> str | None: @@ -255,8 +264,13 @@ class TasmotaLight( if ATTR_BRIGHTNESS in kwargs and brightness_supported(supported_color_modes): attributes["brightness"] = scale_brightness(kwargs[ATTR_BRIGHTNESS]) - if ATTR_COLOR_TEMP in kwargs and ColorMode.COLOR_TEMP in supported_color_modes: - attributes["color_temp"] = int(kwargs[ATTR_COLOR_TEMP]) + if ( + ATTR_COLOR_TEMP_KELVIN in kwargs + and ColorMode.COLOR_TEMP in supported_color_modes + ): + attributes["color_temp"] = color_util.color_temperature_kelvin_to_mired( + 
kwargs[ATTR_COLOR_TEMP_KELVIN] + ) if ATTR_EFFECT in kwargs: attributes["effect"] = kwargs[ATTR_EFFECT] From 9c9e82a93e052431954e1908ca8ddc0268b470d8 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 11 Dec 2024 09:58:08 +0100 Subject: [PATCH 061/677] Migrate zha lights to use Kelvin (#132816) --- homeassistant/components/zha/light.py | 43 +++++++++++++++++++-------- 1 file changed, 31 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/zha/light.py b/homeassistant/components/zha/light.py index 9a22dfb02e9..2f5d9e9e4c9 100644 --- a/homeassistant/components/zha/light.py +++ b/homeassistant/components/zha/light.py @@ -15,7 +15,7 @@ from zha.application.platforms.light.const import ( from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_TRANSITION, @@ -29,6 +29,7 @@ from homeassistant.const import STATE_ON, Platform from homeassistant.core import HomeAssistant, State, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import color as color_util from .entity import ZHAEntity from .helpers import ( @@ -128,14 +129,18 @@ class Light(LightEntity, ZHAEntity): return self.entity_data.entity.brightness @property - def min_mireds(self) -> int: - """Return the coldest color_temp that this light supports.""" - return self.entity_data.entity.min_mireds + def max_color_temp_kelvin(self) -> int: + """Return the coldest color_temp_kelvin that this light supports.""" + return color_util.color_temperature_mired_to_kelvin( + self.entity_data.entity.min_mireds + ) @property - def max_mireds(self) -> int: - """Return the warmest color_temp that this light supports.""" - return self.entity_data.entity.max_mireds + def min_color_temp_kelvin(self) -> int: + """Return the warmest color_temp_kelvin that this light 
supports.""" + return color_util.color_temperature_mired_to_kelvin( + self.entity_data.entity.max_mireds + ) @property def xy_color(self) -> tuple[float, float] | None: @@ -143,9 +148,13 @@ class Light(LightEntity, ZHAEntity): return self.entity_data.entity.xy_color @property - def color_temp(self) -> int | None: - """Return the CT color value in mireds.""" - return self.entity_data.entity.color_temp + def color_temp_kelvin(self) -> int | None: + """Return the color temperature value in Kelvin.""" + return ( + color_util.color_temperature_mired_to_kelvin(mireds) + if (mireds := self.entity_data.entity.color_temp) + else None + ) @property def color_mode(self) -> ColorMode | None: @@ -167,12 +176,17 @@ class Light(LightEntity, ZHAEntity): @convert_zha_error_to_ha_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" + color_temp = ( + color_util.color_temperature_kelvin_to_mired(color_temp_k) + if (color_temp_k := kwargs.get(ATTR_COLOR_TEMP_KELVIN)) + else None + ) await self.entity_data.entity.async_turn_on( transition=kwargs.get(ATTR_TRANSITION), brightness=kwargs.get(ATTR_BRIGHTNESS), effect=kwargs.get(ATTR_EFFECT), flash=kwargs.get(ATTR_FLASH), - color_temp=kwargs.get(ATTR_COLOR_TEMP), + color_temp=color_temp, xy_color=kwargs.get(ATTR_XY_COLOR), ) self.async_write_ha_state() @@ -188,12 +202,17 @@ class Light(LightEntity, ZHAEntity): @callback def restore_external_state_attributes(self, state: State) -> None: """Restore entity state.""" + color_temp = ( + color_util.color_temperature_kelvin_to_mired(color_temp_k) + if (color_temp_k := state.attributes.get(ATTR_COLOR_TEMP_KELVIN)) + else None + ) self.entity_data.entity.restore_external_state_attributes( state=(state.state == STATE_ON), off_with_transition=state.attributes.get(OFF_WITH_TRANSITION), off_brightness=state.attributes.get(OFF_BRIGHTNESS), brightness=state.attributes.get(ATTR_BRIGHTNESS), - color_temp=state.attributes.get(ATTR_COLOR_TEMP), + color_temp=color_temp, 
xy_color=state.attributes.get(ATTR_XY_COLOR), color_mode=( HA_TO_ZHA_COLOR_MODE[ColorMode(state.attributes[ATTR_COLOR_MODE])] From 0e8961276fed60a7892945625e1e10b66820d459 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Wed, 11 Dec 2024 10:50:42 +0100 Subject: [PATCH 062/677] Enable pydantic.v1 mypy plugin (#132907) --- homeassistant/components/bang_olufsen/const.py | 16 ++++++++-------- homeassistant/components/bang_olufsen/entity.py | 2 +- .../components/bang_olufsen/media_player.py | 14 +++++++------- homeassistant/components/google/__init__.py | 4 ++-- homeassistant/components/google/calendar.py | 10 +++++----- homeassistant/components/google/coordinator.py | 4 ++-- mypy.ini | 2 +- script/hassfest/mypy_config.py | 7 ++++++- 8 files changed, 32 insertions(+), 27 deletions(-) diff --git a/homeassistant/components/bang_olufsen/const.py b/homeassistant/components/bang_olufsen/const.py index 7f87ce11097..9f0649e610b 100644 --- a/homeassistant/components/bang_olufsen/const.py +++ b/homeassistant/components/bang_olufsen/const.py @@ -137,7 +137,7 @@ VALID_MEDIA_TYPES: Final[tuple] = ( # Fallback sources to use in case of API failure. 
FALLBACK_SOURCES: Final[SourceArray] = SourceArray( items=[ - Source( # type: ignore[call-arg] + Source( id="uriStreamer", is_enabled=True, is_playable=True, @@ -145,7 +145,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="uriStreamer"), is_seekable=False, ), - Source( # type: ignore[call-arg] + Source( id="bluetooth", is_enabled=True, is_playable=True, @@ -153,7 +153,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="bluetooth"), is_seekable=False, ), - Source( # type: ignore[call-arg] + Source( id="spotify", is_enabled=True, is_playable=True, @@ -161,7 +161,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="spotify"), is_seekable=True, ), - Source( # type: ignore[call-arg] + Source( id="lineIn", is_enabled=True, is_playable=True, @@ -169,7 +169,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="lineIn"), is_seekable=False, ), - Source( # type: ignore[call-arg] + Source( id="spdif", is_enabled=True, is_playable=True, @@ -177,7 +177,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="spdif"), is_seekable=False, ), - Source( # type: ignore[call-arg] + Source( id="netRadio", is_enabled=True, is_playable=True, @@ -185,7 +185,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="netRadio"), is_seekable=False, ), - Source( # type: ignore[call-arg] + Source( id="deezer", is_enabled=True, is_playable=True, @@ -193,7 +193,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray( type=SourceTypeEnum(value="deezer"), is_seekable=True, ), - Source( # type: ignore[call-arg] + Source( id="tidalConnect", is_enabled=True, is_playable=True, diff --git a/homeassistant/components/bang_olufsen/entity.py b/homeassistant/components/bang_olufsen/entity.py index 77fe7c6a1ff..8ed68da1678 100644 --- a/homeassistant/components/bang_olufsen/entity.py +++ 
b/homeassistant/components/bang_olufsen/entity.py @@ -42,7 +42,7 @@ class BangOlufsenBase: # Objects that get directly updated by notifications. self._playback_metadata: PlaybackContentMetadata = PlaybackContentMetadata() - self._playback_progress: PlaybackProgress = PlaybackProgress(total_duration=0) # type: ignore[call-arg] + self._playback_progress: PlaybackProgress = PlaybackProgress(total_duration=0) self._playback_source: Source = Source() self._playback_state: RenderingState = RenderingState() self._source_change: Source = Source() diff --git a/homeassistant/components/bang_olufsen/media_player.py b/homeassistant/components/bang_olufsen/media_player.py index d8b7a1bf940..282ecdd2ae5 100644 --- a/homeassistant/components/bang_olufsen/media_player.py +++ b/homeassistant/components/bang_olufsen/media_player.py @@ -210,9 +210,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): # Misc. variables. self._audio_sources: dict[str, str] = {} self._media_image: Art = Art() - self._software_status: SoftwareUpdateStatus = SoftwareUpdateStatus( # type: ignore[call-arg] + self._software_status: SoftwareUpdateStatus = SoftwareUpdateStatus( software_version="", - state=SoftwareUpdateState(seconds_remaining=0, value="idle"), # type: ignore[call-arg] + state=SoftwareUpdateState(seconds_remaining=0, value="idle"), ) self._sources: dict[str, str] = {} self._state: str = MediaPlayerState.IDLE @@ -896,9 +896,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): elif media_type == BangOlufsenMediaType.RADIO: await self._client.run_provided_scene( - scene_properties=SceneProperties( # type: ignore[call-arg] + scene_properties=SceneProperties( action_list=[ - Action( # type: ignore[call-arg] + Action( type="radio", radio_station_id=media_id, ) @@ -919,7 +919,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): deezer_id = kwargs[ATTR_MEDIA_EXTRA]["id"] await self._client.start_deezer_flow( - 
user_flow=UserFlow(user_id=deezer_id) # type: ignore[call-arg] + user_flow=UserFlow(user_id=deezer_id) ) # Play a playlist or album. @@ -929,7 +929,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): start_from = kwargs[ATTR_MEDIA_EXTRA]["start_from"] await self._client.add_to_queue( - play_queue_item=PlayQueueItem( # type: ignore[call-arg] + play_queue_item=PlayQueueItem( provider=PlayQueueItemType(value=media_type), start_now_from_position=start_from, type="playlist", @@ -940,7 +940,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): # Play a track. else: await self._client.add_to_queue( - play_queue_item=PlayQueueItem( # type: ignore[call-arg] + play_queue_item=PlayQueueItem( provider=PlayQueueItemType(value=media_type), start_now_from_position=0, type="track", diff --git a/homeassistant/components/google/__init__.py b/homeassistant/components/google/__init__.py index 1d204883579..2ad400aabab 100644 --- a/homeassistant/components/google/__init__.py +++ b/homeassistant/components/google/__init__.py @@ -277,10 +277,10 @@ async def async_setup_add_event_service( elif EVENT_START_DATETIME in call.data and EVENT_END_DATETIME in call.data: start_dt = call.data[EVENT_START_DATETIME] end_dt = call.data[EVENT_END_DATETIME] - start = DateOrDatetime( # type: ignore[call-arg] + start = DateOrDatetime( date_time=start_dt, timezone=str(hass.config.time_zone) ) - end = DateOrDatetime(date_time=end_dt, timezone=str(hass.config.time_zone)) # type: ignore[call-arg] + end = DateOrDatetime(date_time=end_dt, timezone=str(hass.config.time_zone)) if start is None or end is None: raise ValueError( diff --git a/homeassistant/components/google/calendar.py b/homeassistant/components/google/calendar.py index 045e0e31b46..5ac5dae616c 100644 --- a/homeassistant/components/google/calendar.py +++ b/homeassistant/components/google/calendar.py @@ -272,7 +272,7 @@ async def async_setup_entry( entity_description.search, ) else: - request_template = 
SyncEventsRequest( # type: ignore[call-arg] + request_template = SyncEventsRequest( calendar_id=calendar_id, start_time=dt_util.now() + SYNC_EVENT_MIN_TIME, ) @@ -437,11 +437,11 @@ class GoogleCalendarEntity( start: DateOrDatetime end: DateOrDatetime if isinstance(dtstart, datetime): - start = DateOrDatetime( # type: ignore[call-arg] + start = DateOrDatetime( date_time=dt_util.as_local(dtstart), timezone=str(dt_util.get_default_time_zone()), ) - end = DateOrDatetime( # type: ignore[call-arg] + end = DateOrDatetime( date_time=dt_util.as_local(dtend), timezone=str(dt_util.get_default_time_zone()), ) @@ -543,8 +543,8 @@ async def async_create_event(entity: GoogleCalendarEntity, call: ServiceCall) -> elif EVENT_START_DATETIME in call.data and EVENT_END_DATETIME in call.data: start_dt = call.data[EVENT_START_DATETIME] end_dt = call.data[EVENT_END_DATETIME] - start = DateOrDatetime(date_time=start_dt, timezone=str(hass.config.time_zone)) # type: ignore[call-arg] - end = DateOrDatetime(date_time=end_dt, timezone=str(hass.config.time_zone)) # type: ignore[call-arg] + start = DateOrDatetime(date_time=start_dt, timezone=str(hass.config.time_zone)) + end = DateOrDatetime(date_time=end_dt, timezone=str(hass.config.time_zone)) if start is None or end is None: raise ValueError("Missing required fields to set start or end date/datetime") diff --git a/homeassistant/components/google/coordinator.py b/homeassistant/components/google/coordinator.py index 06f33782479..19198041c05 100644 --- a/homeassistant/components/google/coordinator.py +++ b/homeassistant/components/google/coordinator.py @@ -131,7 +131,7 @@ class CalendarQueryUpdateCoordinator(DataUpdateCoordinator[list[Event]]): self, start_date: datetime, end_date: datetime ) -> Iterable[Event]: """Get all events in a specific time frame.""" - request = ListEventsRequest( # type: ignore[call-arg] + request = ListEventsRequest( calendar_id=self.calendar_id, start_time=start_date, end_time=end_date, @@ -149,7 +149,7 @@ class 
CalendarQueryUpdateCoordinator(DataUpdateCoordinator[list[Event]]): async def _async_update_data(self) -> list[Event]: """Fetch data from API endpoint.""" - request = ListEventsRequest(calendar_id=self.calendar_id, search=self._search) # type: ignore[call-arg] + request = ListEventsRequest(calendar_id=self.calendar_id, search=self._search) try: result = await self.calendar_service.async_list_events(request) except ApiException as err: diff --git a/mypy.ini b/mypy.ini index fb58810515b..4e5d4212ee9 100644 --- a/mypy.ini +++ b/mypy.ini @@ -5,7 +5,7 @@ [mypy] python_version = 3.12 platform = linux -plugins = pydantic.mypy +plugins = pydantic.mypy, pydantic.v1.mypy show_error_codes = true follow_imports = normal local_partial_types = true diff --git a/script/hassfest/mypy_config.py b/script/hassfest/mypy_config.py index ec4d4b3d3a9..5767066c943 100644 --- a/script/hassfest/mypy_config.py +++ b/script/hassfest/mypy_config.py @@ -33,7 +33,12 @@ HEADER: Final = """ GENERAL_SETTINGS: Final[dict[str, str]] = { "python_version": ".".join(str(x) for x in REQUIRED_PYTHON_VER[:2]), "platform": "linux", - "plugins": "pydantic.mypy", + "plugins": ", ".join( # noqa: FLY002 + [ + "pydantic.mypy", + "pydantic.v1.mypy", + ] + ), "show_error_codes": "true", "follow_imports": "normal", # "enable_incomplete_feature": ", ".join( # noqa: FLY002 From beda2737212bc8ac365eaeaf28e24e83565b4978 Mon Sep 17 00:00:00 2001 From: shapournemati-iotty <130070037+shapournemati-iotty@users.noreply.github.com> Date: Wed, 11 Dec 2024 10:52:47 +0100 Subject: [PATCH 063/677] upgrade iottycloud lib to 0.3.0 (#132836) --- homeassistant/components/iotty/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/iotty/manifest.json b/homeassistant/components/iotty/manifest.json index db81f7c5839..5425ce3b480 100644 --- a/homeassistant/components/iotty/manifest.json +++ 
b/homeassistant/components/iotty/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/iotty", "integration_type": "device", "iot_class": "cloud_polling", - "requirements": ["iottycloud==0.2.1"] + "requirements": ["iottycloud==0.3.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 872a2123a9c..bf6b5bbaeec 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1207,7 +1207,7 @@ insteon-frontend-home-assistant==0.5.0 intellifire4py==4.1.9 # homeassistant.components.iotty -iottycloud==0.2.1 +iottycloud==0.3.0 # homeassistant.components.iperf3 iperf3==0.1.11 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5b428194aa2..5d8a15bc202 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1018,7 +1018,7 @@ insteon-frontend-home-assistant==0.5.0 intellifire4py==4.1.9 # homeassistant.components.iotty -iottycloud==0.2.1 +iottycloud==0.3.0 # homeassistant.components.isal isal==1.7.1 From b26583b0bf501bc229403a2cc7b7de08cb9c6b96 Mon Sep 17 00:00:00 2001 From: Simon Lamon <32477463+silamon@users.noreply.github.com> Date: Wed, 11 Dec 2024 11:12:05 +0100 Subject: [PATCH 064/677] Bump python-linkplay to v0.1.1 (#132091) --- homeassistant/components/linkplay/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/linkplay/test_diagnostics.py | 6 ++++-- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/linkplay/manifest.json b/homeassistant/components/linkplay/manifest.json index e74d22b8207..cc124ceb611 100644 --- a/homeassistant/components/linkplay/manifest.json +++ b/homeassistant/components/linkplay/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["linkplay"], - "requirements": ["python-linkplay==0.0.20"], + "requirements": ["python-linkplay==0.1.1"], "zeroconf": ["_linkplay._tcp.local."] } diff --git a/requirements_all.txt 
b/requirements_all.txt index bf6b5bbaeec..b263779e67f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2368,7 +2368,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.8.1 # homeassistant.components.linkplay -python-linkplay==0.0.20 +python-linkplay==0.1.1 # homeassistant.components.lirc # python-lirc==1.2.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5d8a15bc202..d641a0fa4e2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1898,7 +1898,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.8.1 # homeassistant.components.linkplay -python-linkplay==0.0.20 +python-linkplay==0.1.1 # homeassistant.components.matter python-matter-server==6.6.0 diff --git a/tests/components/linkplay/test_diagnostics.py b/tests/components/linkplay/test_diagnostics.py index 369142978a3..de60b7ecb3a 100644 --- a/tests/components/linkplay/test_diagnostics.py +++ b/tests/components/linkplay/test_diagnostics.py @@ -31,8 +31,10 @@ async def test_diagnostics( patch.object(LinkPlayMultiroom, "update_status", return_value=None), ): endpoints = [ - LinkPlayApiEndpoint(protocol="https", endpoint=HOST, session=None), - LinkPlayApiEndpoint(protocol="http", endpoint=HOST, session=None), + LinkPlayApiEndpoint( + protocol="https", port=443, endpoint=HOST, session=None + ), + LinkPlayApiEndpoint(protocol="http", port=80, endpoint=HOST, session=None), ] for endpoint in endpoints: mock_session.get( From dc8b7cfede78891d44c86c16a454582116cea9ed Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Wed, 11 Dec 2024 11:51:16 +0100 Subject: [PATCH 065/677] Allow bytearray for mqtt payload type (#132906) --- homeassistant/components/mqtt/client.py | 2 +- homeassistant/components/mqtt/switch.py | 2 +- homeassistant/helpers/service_info/mqtt.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index 
d8bc0862d29..0091d2370a4 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -119,7 +119,7 @@ MAX_PACKETS_TO_READ = 500 type SocketType = socket.socket | ssl.SSLSocket | mqtt.WebsocketWrapper | Any -type SubscribePayloadType = str | bytes # Only bytes if encoding is None +type SubscribePayloadType = str | bytes | bytearray # Only bytes if encoding is None def publish( diff --git a/homeassistant/components/mqtt/switch.py b/homeassistant/components/mqtt/switch.py index c90174e8a01..0a54bcdb378 100644 --- a/homeassistant/components/mqtt/switch.py +++ b/homeassistant/components/mqtt/switch.py @@ -91,7 +91,7 @@ class MqttSwitch(MqttEntity, SwitchEntity, RestoreEntity): _entity_id_format = switch.ENTITY_ID_FORMAT _optimistic: bool - _is_on_map: dict[str | bytes, bool | None] + _is_on_map: dict[str | bytes | bytearray, bool | None] _command_template: Callable[[PublishPayloadType], PublishPayloadType] _value_template: Callable[[ReceivePayloadType], ReceivePayloadType] diff --git a/homeassistant/helpers/service_info/mqtt.py b/homeassistant/helpers/service_info/mqtt.py index 6ffc981ced1..a5284807617 100644 --- a/homeassistant/helpers/service_info/mqtt.py +++ b/homeassistant/helpers/service_info/mqtt.py @@ -4,7 +4,7 @@ from dataclasses import dataclass from homeassistant.data_entry_flow import BaseServiceInfo -type ReceivePayloadType = str | bytes +type ReceivePayloadType = str | bytes | bytearray @dataclass(slots=True) From 7103b7fd8098bbc4d0a71403a47d45a3eab86de0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Wed, 11 Dec 2024 13:01:02 +0100 Subject: [PATCH 066/677] Use snapshot tests for remaining myuplink platforms (#132915) Co-authored-by: Joost Lekkerkerker --- .../components/myuplink/quality_scale.yaml | 6 +- .../myuplink/snapshots/test_number.ambr | 335 ++++++++++++++++++ .../myuplink/snapshots/test_select.ambr | 119 +++++++ .../myuplink/snapshots/test_switch.ambr | 185 ++++++++++ 
tests/components/myuplink/test_number.py | 34 +- tests/components/myuplink/test_select.py | 37 +- tests/components/myuplink/test_switch.py | 31 +- 7 files changed, 689 insertions(+), 58 deletions(-) create mode 100644 tests/components/myuplink/snapshots/test_number.ambr create mode 100644 tests/components/myuplink/snapshots/test_select.ambr create mode 100644 tests/components/myuplink/snapshots/test_switch.ambr diff --git a/homeassistant/components/myuplink/quality_scale.yaml b/homeassistant/components/myuplink/quality_scale.yaml index b876f4c329c..661986a2f71 100644 --- a/homeassistant/components/myuplink/quality_scale.yaml +++ b/homeassistant/components/myuplink/quality_scale.yaml @@ -7,7 +7,7 @@ rules: appropriate-polling: done brands: done common-modules: done - config-flow-test-coverage: todo + config-flow-test-coverage: done config-flow: done dependency-transparency: done docs-actions: @@ -47,9 +47,7 @@ rules: status: exempt comment: Handled by coordinator reauthentication-flow: done - test-coverage: - status: todo - comment: PR is pending review + test-coverage: done # Gold devices: done diff --git a/tests/components/myuplink/snapshots/test_number.ambr b/tests/components/myuplink/snapshots/test_number.ambr new file mode 100644 index 00000000000..db1a8e0949f --- /dev/null +++ b/tests/components/myuplink/snapshots/test_number.ambr @@ -0,0 +1,335 @@ +# serializer version: 1 +# name: test_number_states[platforms0][number.gotham_city_degree_minutes-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 3000.0, + 'min': -3000.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_degree_minutes', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Degree minutes', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'degree_minutes', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40940', + 'unit_of_measurement': 'DM', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_degree_minutes-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Degree minutes', + 'max': 3000.0, + 'min': -3000.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': 'DM', + }), + 'context': , + 'entity_id': 'number.gotham_city_degree_minutes', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-875.0', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_degree_minutes_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 3000.0, + 'min': -3000.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_degree_minutes_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Degree minutes', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'degree_minutes', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-40940', + 'unit_of_measurement': 'DM', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_degree_minutes_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Degree minutes', + 'max': 3000.0, + 'min': -3000.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': 'DM', + }), + 'context': , + 'entity_id': 
'number.gotham_city_degree_minutes_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-875.0', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_heating_offset_climate_system_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 10.0, + 'min': -10.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_heating_offset_climate_system_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating offset climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47011', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_heating_offset_climate_system_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Heating offset climate system 1', + 'max': 10.0, + 'min': -10.0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.gotham_city_heating_offset_climate_system_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.0', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_heating_offset_climate_system_1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 10.0, + 'min': -10.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 
'number.gotham_city_heating_offset_climate_system_1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating offset climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47011', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_heating_offset_climate_system_1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Heating offset climate system 1', + 'max': 10.0, + 'min': -10.0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.gotham_city_heating_offset_climate_system_1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.0', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_start_diff_additional_heat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 2000.0, + 'min': 100.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_start_diff_additional_heat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'start diff additional heat', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'degree_minutes', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-148072', + 'unit_of_measurement': 'DM', + }) +# --- +# name: 
test_number_states[platforms0][number.gotham_city_start_diff_additional_heat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City start diff additional heat', + 'max': 2000.0, + 'min': 100.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': 'DM', + }), + 'context': , + 'entity_id': 'number.gotham_city_start_diff_additional_heat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '700.0', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_start_diff_additional_heat_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 2000.0, + 'min': 100.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_start_diff_additional_heat_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'start diff additional heat', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'degree_minutes', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-148072', + 'unit_of_measurement': 'DM', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_start_diff_additional_heat_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City start diff additional heat', + 'max': 2000.0, + 'min': 100.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': 'DM', + }), + 'context': , + 'entity_id': 'number.gotham_city_start_diff_additional_heat_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '700.0', + }) +# --- diff --git a/tests/components/myuplink/snapshots/test_select.ambr 
b/tests/components/myuplink/snapshots/test_select.ambr new file mode 100644 index 00000000000..eff06bc7f2d --- /dev/null +++ b/tests/components/myuplink/snapshots/test_select.ambr @@ -0,0 +1,119 @@ +# serializer version: 1 +# name: test_select_states[platforms0][select.gotham_city_comfort_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Smart control', + 'Economy', + 'Normal', + 'Luxury', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.gotham_city_comfort_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'comfort mode', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47041', + 'unit_of_measurement': None, + }) +# --- +# name: test_select_states[platforms0][select.gotham_city_comfort_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City comfort mode', + 'options': list([ + 'Smart control', + 'Economy', + 'Normal', + 'Luxury', + ]), + }), + 'context': , + 'entity_id': 'select.gotham_city_comfort_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Economy', + }) +# --- +# name: test_select_states[platforms0][select.gotham_city_comfort_mode_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Smart control', + 'Economy', + 'Normal', + 'Luxury', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 
'select.gotham_city_comfort_mode_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'comfort mode', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47041', + 'unit_of_measurement': None, + }) +# --- +# name: test_select_states[platforms0][select.gotham_city_comfort_mode_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City comfort mode', + 'options': list([ + 'Smart control', + 'Economy', + 'Normal', + 'Luxury', + ]), + }), + 'context': , + 'entity_id': 'select.gotham_city_comfort_mode_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Economy', + }) +# --- diff --git a/tests/components/myuplink/snapshots/test_switch.ambr b/tests/components/myuplink/snapshots/test_switch.ambr new file mode 100644 index 00000000000..5d621e661ee --- /dev/null +++ b/tests/components/myuplink/snapshots/test_switch.ambr @@ -0,0 +1,185 @@ +# serializer version: 1 +# name: test_switch_states[platforms0][switch.gotham_city_increased_ventilation-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.gotham_city_increased_ventilation', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'In\xadcreased venti\xadlation', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'boost_ventilation', + 'unique_id': 
'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-50005', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_increased_ventilation-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City In\xadcreased venti\xadlation', + }), + 'context': , + 'entity_id': 'switch.gotham_city_increased_ventilation', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_increased_ventilation_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.gotham_city_increased_ventilation_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'In\xadcreased venti\xadlation', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'boost_ventilation', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-50005', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_increased_ventilation_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City In\xadcreased venti\xadlation', + }), + 'context': , + 'entity_id': 'switch.gotham_city_increased_ventilation_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_temporary_lux-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.gotham_city_temporary_lux', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tempo\xadrary lux', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temporary_lux', + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-50004', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_temporary_lux-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Tempo\xadrary lux', + }), + 'context': , + 'entity_id': 'switch.gotham_city_temporary_lux', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_temporary_lux_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.gotham_city_temporary_lux_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tempo\xadrary lux', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temporary_lux', + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-50004', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_states[platforms0][switch.gotham_city_temporary_lux_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Tempo\xadrary lux', + }), + 'context': , + 'entity_id': 
'switch.gotham_city_temporary_lux_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/myuplink/test_number.py b/tests/components/myuplink/test_number.py index 4106af1b5b9..ef7b1749782 100644 --- a/tests/components/myuplink/test_number.py +++ b/tests/components/myuplink/test_number.py @@ -4,6 +4,7 @@ from unittest.mock import MagicMock from aiohttp import ClientError import pytest +from syrupy import SnapshotAssertion from homeassistant.components.number import SERVICE_SET_VALUE from homeassistant.const import ATTR_ENTITY_ID, Platform @@ -11,6 +12,8 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er +from tests.common import MockConfigEntry, snapshot_platform + TEST_PLATFORM = Platform.NUMBER pytestmark = pytest.mark.parametrize("platforms", [(TEST_PLATFORM,)]) @@ -31,24 +34,6 @@ async def test_entity_registry( assert entry.unique_id == ENTITY_UID -async def test_attributes( - hass: HomeAssistant, - mock_myuplink_client: MagicMock, - setup_platform: None, -) -> None: - """Test the entity attributes are correct.""" - - state = hass.states.get(ENTITY_ID) - assert state.state == "1.0" - assert state.attributes == { - "friendly_name": ENTITY_FRIENDLY_NAME, - "min": -10.0, - "max": 10.0, - "mode": "auto", - "step": 1.0, - } - - async def test_set_value( hass: HomeAssistant, mock_myuplink_client: MagicMock, @@ -98,3 +83,16 @@ async def test_entity_registry_smo20( entry = entity_registry.async_get("number.gotham_city_change_in_curve") assert entry.unique_id == "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47028" + + +async def test_number_states( + hass: HomeAssistant, + mock_myuplink_client: MagicMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + setup_platform: None, +) -> None: + """Test number entity state.""" + + await 
snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/myuplink/test_select.py b/tests/components/myuplink/test_select.py index 7ad2d17cb5d..f1797ebe5ad 100644 --- a/tests/components/myuplink/test_select.py +++ b/tests/components/myuplink/test_select.py @@ -4,6 +4,7 @@ from unittest.mock import MagicMock from aiohttp import ClientError import pytest +from syrupy import SnapshotAssertion from homeassistant.const import ( ATTR_ENTITY_ID, @@ -15,6 +16,8 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er +from tests.common import MockConfigEntry, snapshot_platform + TEST_PLATFORM = Platform.SELECT pytestmark = pytest.mark.parametrize("platforms", [(TEST_PLATFORM,)]) @@ -23,27 +26,6 @@ ENTITY_FRIENDLY_NAME = "Gotham City comfort mode" ENTITY_UID = "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47041" -async def test_select_entity( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_myuplink_client: MagicMock, - setup_platform: None, -) -> None: - """Test that the entities are registered in the entity registry.""" - - entry = entity_registry.async_get(ENTITY_ID) - assert entry.unique_id == ENTITY_UID - - # Test the select attributes are correct. 
- - state = hass.states.get(ENTITY_ID) - assert state.state == "Economy" - assert state.attributes == { - "options": ["Smart control", "Economy", "Normal", "Luxury"], - "friendly_name": ENTITY_FRIENDLY_NAME, - } - - async def test_selecting( hass: HomeAssistant, mock_myuplink_client: MagicMock, @@ -87,3 +69,16 @@ async def test_entity_registry_smo20( entry = entity_registry.async_get("select.gotham_city_all") assert entry.unique_id == "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47660" + + +async def test_select_states( + hass: HomeAssistant, + mock_myuplink_client: MagicMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + setup_platform: None, +) -> None: + """Test select entity state.""" + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/myuplink/test_switch.py b/tests/components/myuplink/test_switch.py index 5e309e7152e..82d381df7fc 100644 --- a/tests/components/myuplink/test_switch.py +++ b/tests/components/myuplink/test_switch.py @@ -4,18 +4,20 @@ from unittest.mock import MagicMock from aiohttp import ClientError import pytest +from syrupy import SnapshotAssertion from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_OFF, Platform, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er +from tests.common import MockConfigEntry, snapshot_platform + TEST_PLATFORM = Platform.SWITCH pytestmark = pytest.mark.parametrize("platforms", [(TEST_PLATFORM,)]) @@ -36,20 +38,6 @@ async def test_entity_registry( assert entry.unique_id == ENTITY_UID -async def test_attributes( - hass: HomeAssistant, - mock_myuplink_client: MagicMock, - setup_platform: None, -) -> None: - """Test the switch attributes are correct.""" - - state = hass.states.get(ENTITY_ID) - assert state.state == STATE_OFF - assert 
state.attributes == { - "friendly_name": ENTITY_FRIENDLY_NAME, - } - - @pytest.mark.parametrize( ("service"), [ @@ -109,3 +97,16 @@ async def test_entity_registry_smo20( entry = entity_registry.async_get(ENTITY_ID) assert entry.unique_id == ENTITY_UID + + +async def test_switch_states( + hass: HomeAssistant, + mock_myuplink_client: MagicMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + setup_platform: None, +) -> None: + """Test switch entity state.""" + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) From ecfa88891868bd3ca0685d8dc9edc0ec87c1eec8 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Wed, 11 Dec 2024 13:52:53 +0100 Subject: [PATCH 067/677] Create quality_scale.yaml from integration scaffold script (#132199) Co-authored-by: Josef Zweck <24647999+zweckj@users.noreply.github.com> --- script/scaffold/__main__.py | 2 +- script/scaffold/generate.py | 2 +- .../config_flow/integration/config_flow.py | 2 +- .../integration/config_flow.py | 2 +- .../integration/config_flow.py | 2 +- .../integration/application_credentials.py | 6 +- .../integration/quality_scale.yaml | 60 +++++++++++++++++++ 7 files changed, 67 insertions(+), 9 deletions(-) create mode 100644 script/scaffold/templates/integration/integration/quality_scale.yaml diff --git a/script/scaffold/__main__.py b/script/scaffold/__main__.py index 45dbed790e6..93c787df50f 100644 --- a/script/scaffold/__main__.py +++ b/script/scaffold/__main__.py @@ -28,7 +28,7 @@ def get_arguments() -> argparse.Namespace: return parser.parse_args() -def main(): +def main() -> int: """Scaffold an integration.""" if not Path("requirements_all.txt").is_file(): print("Run from project root") diff --git a/script/scaffold/generate.py b/script/scaffold/generate.py index 0bee69b93f8..9ca5ead5719 100644 --- a/script/scaffold/generate.py +++ b/script/scaffold/generate.py @@ -19,7 +19,7 @@ def generate(template: str, info: Info) 
-> None: print() -def _generate(src_dir, target_dir, info: Info) -> None: +def _generate(src_dir: Path, target_dir: Path, info: Info) -> None: """Generate an integration.""" replaces = {"NEW_DOMAIN": info.domain, "NEW_NAME": info.name} diff --git a/script/scaffold/templates/config_flow/integration/config_flow.py b/script/scaffold/templates/config_flow/integration/config_flow.py index 0bff976f288..06db7592840 100644 --- a/script/scaffold/templates/config_flow/integration/config_flow.py +++ b/script/scaffold/templates/config_flow/integration/config_flow.py @@ -1,4 +1,4 @@ -"""Config flow for NEW_NAME integration.""" +"""Config flow for the NEW_NAME integration.""" from __future__ import annotations diff --git a/script/scaffold/templates/config_flow_discovery/integration/config_flow.py b/script/scaffold/templates/config_flow_discovery/integration/config_flow.py index e2cfed40e1d..570b70b85aa 100644 --- a/script/scaffold/templates/config_flow_discovery/integration/config_flow.py +++ b/script/scaffold/templates/config_flow_discovery/integration/config_flow.py @@ -1,4 +1,4 @@ -"""Config flow for NEW_NAME.""" +"""Config flow for the NEW_NAME integration.""" import my_pypi_dependency diff --git a/script/scaffold/templates/config_flow_helper/integration/config_flow.py b/script/scaffold/templates/config_flow_helper/integration/config_flow.py index 5d89fec2da2..c2ab7a205da 100644 --- a/script/scaffold/templates/config_flow_helper/integration/config_flow.py +++ b/script/scaffold/templates/config_flow_helper/integration/config_flow.py @@ -1,4 +1,4 @@ -"""Config flow for NEW_NAME integration.""" +"""Config flow for the NEW_NAME integration.""" from __future__ import annotations diff --git a/script/scaffold/templates/config_flow_oauth2/integration/application_credentials.py b/script/scaffold/templates/config_flow_oauth2/integration/application_credentials.py index 51ef70b1885..0f01c8402df 100644 --- 
a/script/scaffold/templates/config_flow_oauth2/integration/application_credentials.py +++ b/script/scaffold/templates/config_flow_oauth2/integration/application_credentials.py @@ -1,11 +1,9 @@ -"""application_credentials platform the NEW_NAME integration.""" +"""Application credentials platform for the NEW_NAME integration.""" from homeassistant.components.application_credentials import AuthorizationServer from homeassistant.core import HomeAssistant -# TODO Update with your own urls -OAUTH2_AUTHORIZE = "https://www.example.com/auth/authorize" -OAUTH2_TOKEN = "https://www.example.com/auth/token" +from .const import OAUTH2_AUTHORIZE, OAUTH2_TOKEN async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer: diff --git a/script/scaffold/templates/integration/integration/quality_scale.yaml b/script/scaffold/templates/integration/integration/quality_scale.yaml new file mode 100644 index 00000000000..201a91652e5 --- /dev/null +++ b/script/scaffold/templates/integration/integration/quality_scale.yaml @@ -0,0 +1,60 @@ +rules: + # Bronze + action-setup: todo + appropriate-polling: todo + brands: todo + common-modules: todo + config-flow-test-coverage: todo + config-flow: todo + dependency-transparency: todo + docs-actions: todo + docs-high-level-description: todo + docs-installation-instructions: todo + docs-removal-instructions: todo + entity-event-setup: todo + entity-unique-id: todo + has-entity-name: todo + runtime-data: todo + test-before-configure: todo + test-before-setup: todo + unique-config-entry: todo + + # Silver + action-exceptions: todo + config-entry-unloading: todo + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: todo + integration-owner: todo + log-when-unavailable: todo + parallel-updates: todo + reauthentication-flow: todo + test-coverage: todo + + # Gold + devices: todo + diagnostics: todo + discovery-update-info: todo + discovery: todo + docs-data-update: todo + docs-examples: todo 
+ docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: todo + entity-device-class: todo + entity-disabled-by-default: todo + entity-translations: todo + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + repair-issues: todo + stale-devices: todo + + # Platinum + async-dependency: todo + inject-websession: todo + strict-typing: todo From f9744799704ce91abb7988d09bcae924a4bdae2e Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Wed, 11 Dec 2024 13:53:14 +0100 Subject: [PATCH 068/677] Velbus add quality_scale.yaml (#131377) Co-authored-by: Allen Porter Co-authored-by: Joost Lekkerkerker --- .../components/velbus/quality_scale.yaml | 82 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 82 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/velbus/quality_scale.yaml diff --git a/homeassistant/components/velbus/quality_scale.yaml b/homeassistant/components/velbus/quality_scale.yaml new file mode 100644 index 00000000000..f3ab8f607b6 --- /dev/null +++ b/homeassistant/components/velbus/quality_scale.yaml @@ -0,0 +1,82 @@ +rules: + # Bronze + action-setup: todo + appropriate-polling: + status: exempt + comment: | + This integration does not poll. 
+ brands: done + common-modules: done + config-flow-test-coverage: + status: todo + comment: | + Split test_flow_usb from the test that tests already_configured, test_flow_usb should also assert the unique_id of the entry + config-flow: + status: todo + comment: | + Dynamically build up the port parameter based on inputs provided by the user, do not fill-in a name parameter, build it up in the config flow + dependency-transparency: done + docs-actions: todo + docs-high-level-description: todo + docs-installation-instructions: todo + docs-removal-instructions: todo + entity-event-setup: todo + entity-unique-id: done + has-entity-name: todo + runtime-data: todo + test-before-configure: done + test-before-setup: todo + unique-config-entry: + status: todo + comment: | + Manual step does not generate an unique-id + + # Silver + action-exceptions: todo + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: todo + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. + test-coverage: todo + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: todo + comment: | + Dynamic devices are discovered, but no entities are created for them + entity-category: done + entity-device-class: todo + entity-disabled-by-default: done + entity-translations: todo + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. 
+ stale-devices: todo + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: | + This integration communicates via serial/usb/tcp and does not require a web session. + strict-typing: done diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 5a09f8c7bd8..aa62b5a5120 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -1105,7 +1105,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "v2c", "vallox", "vasttrafik", - "velbus", "velux", "venstar", "vera", From 05b23d081b023a26adde0ad836cbec2212ac5f6e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Wed, 11 Dec 2024 14:09:33 +0100 Subject: [PATCH 069/677] Set quality_scale for myUplink to Silver (#132923) --- homeassistant/components/myuplink/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/myuplink/manifest.json b/homeassistant/components/myuplink/manifest.json index 0e638a72715..8438d24194c 100644 --- a/homeassistant/components/myuplink/manifest.json +++ b/homeassistant/components/myuplink/manifest.json @@ -6,5 +6,6 @@ "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/myuplink", "iot_class": "cloud_polling", + "quality_scale": "silver", "requirements": ["myuplink==0.6.0"] } From 17533823075d68068ca9cf69c90b12088a0a2eb8 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 11 Dec 2024 15:11:29 +0100 Subject: [PATCH 070/677] Adjust lifx to use local _ATTR_COLOR_TEMP constant (#132840) --- homeassistant/components/lifx/const.py | 3 +++ homeassistant/components/lifx/manager.py | 6 +++--- homeassistant/components/lifx/util.py | 7 +++++-- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/lifx/const.py b/homeassistant/components/lifx/const.py index 9b213cc9f6d..667afe1125d 100644 --- a/homeassistant/components/lifx/const.py +++ 
b/homeassistant/components/lifx/const.py @@ -64,3 +64,6 @@ DATA_LIFX_MANAGER = "lifx_manager" LIFX_CEILING_PRODUCT_IDS = {176, 177} _LOGGER = logging.getLogger(__package__) + +# _ATTR_COLOR_TEMP deprecated - to be removed in 2026.1 +_ATTR_COLOR_TEMP = "color_temp" diff --git a/homeassistant/components/lifx/manager.py b/homeassistant/components/lifx/manager.py index 759d08707cd..27e62717e96 100644 --- a/homeassistant/components/lifx/manager.py +++ b/homeassistant/components/lifx/manager.py @@ -15,7 +15,6 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_BRIGHTNESS_PCT, ATTR_COLOR_NAME, - ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_RGB_COLOR, @@ -30,7 +29,7 @@ from homeassistant.core import HomeAssistant, ServiceCall, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.service import async_extract_referenced_entity_ids -from .const import ATTR_THEME, DATA_LIFX_MANAGER, DOMAIN +from .const import _ATTR_COLOR_TEMP, ATTR_THEME, DATA_LIFX_MANAGER, DOMAIN from .coordinator import LIFXUpdateCoordinator, Light from .util import convert_8_to_16, find_hsbk @@ -126,7 +125,8 @@ LIFX_EFFECT_PULSE_SCHEMA = cv.make_entity_service_schema( vol.Exclusive(ATTR_COLOR_TEMP_KELVIN, COLOR_GROUP): vol.All( vol.Coerce(int), vol.Range(min=1500, max=9000) ), - vol.Exclusive(ATTR_COLOR_TEMP, COLOR_GROUP): cv.positive_int, + # _ATTR_COLOR_TEMP deprecated - to be removed in 2026.1 + vol.Exclusive(_ATTR_COLOR_TEMP, COLOR_GROUP): cv.positive_int, ATTR_PERIOD: vol.All(vol.Coerce(float), vol.Range(min=0.05)), ATTR_CYCLES: vol.All(vol.Coerce(float), vol.Range(min=1)), ATTR_MODE: vol.In(PULSE_MODES), diff --git a/homeassistant/components/lifx/util.py b/homeassistant/components/lifx/util.py index 62d0ea66f81..ffffe7a4856 100644 --- a/homeassistant/components/lifx/util.py +++ b/homeassistant/components/lifx/util.py @@ -27,6 +27,7 @@ from homeassistant.helpers import device_registry as dr import homeassistant.util.color as 
color_util from .const import ( + _ATTR_COLOR_TEMP, _LOGGER, DEFAULT_ATTEMPTS, DOMAIN, @@ -112,13 +113,15 @@ def find_hsbk(hass: HomeAssistant, **kwargs: Any) -> list[float | int | None] | saturation = int(saturation / 100 * 65535) kelvin = 3500 - if "color_temp" in kwargs: # old ATTR_COLOR_TEMP + if _ATTR_COLOR_TEMP in kwargs: # added in 2025.1, can be removed in 2026.1 _LOGGER.warning( "The 'color_temp' parameter is deprecated. Please use 'color_temp_kelvin' for" " all service calls" ) - kelvin = color_util.color_temperature_mired_to_kelvin(kwargs.pop("color_temp")) + kelvin = color_util.color_temperature_mired_to_kelvin( + kwargs.pop(_ATTR_COLOR_TEMP) + ) saturation = 0 if ATTR_COLOR_TEMP_KELVIN in kwargs: From 555d7f1ea420acb969194ab00d91e85626a368d9 Mon Sep 17 00:00:00 2001 From: Simone Chemelli Date: Wed, 11 Dec 2024 09:40:18 -0500 Subject: [PATCH 071/677] Guard Vodafone Station updates against bad data (#132921) guard Vodafone Station updates against bad data --- homeassistant/components/vodafone_station/coordinator.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/homeassistant/components/vodafone_station/coordinator.py b/homeassistant/components/vodafone_station/coordinator.py index d2f408e355b..e95ca2b5976 100644 --- a/homeassistant/components/vodafone_station/coordinator.py +++ b/homeassistant/components/vodafone_station/coordinator.py @@ -2,6 +2,7 @@ from dataclasses import dataclass from datetime import datetime, timedelta +from json.decoder import JSONDecodeError from typing import Any from aiovodafone import VodafoneStationDevice, VodafoneStationSercommApi, exceptions @@ -107,6 +108,7 @@ class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]): exceptions.CannotConnect, exceptions.AlreadyLogged, exceptions.GenericLoginError, + JSONDecodeError, ) as err: raise UpdateFailed(f"Error fetching data: {err!r}") from err except (ConfigEntryAuthFailed, UpdateFailed): From ee4db13c2aa64044ba5524d17881c97f694b6ab9 Mon Sep 17 00:00:00 
2001 From: jb101010-2 <168106462+jb101010-2@users.noreply.github.com> Date: Wed, 11 Dec 2024 15:52:43 +0100 Subject: [PATCH 072/677] Add data description to suez_water config flow (#132466) * Suez_water: config flow data_descriptions * Rename counter by meter * Use placeholders --- homeassistant/components/suez_water/config_flow.py | 5 ++++- .../components/suez_water/quality_scale.yaml | 4 ++-- homeassistant/components/suez_water/strings.json | 12 +++++++++--- 3 files changed, 15 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/suez_water/config_flow.py b/homeassistant/components/suez_water/config_flow.py index 2a1edea35f1..b24dc1815ee 100644 --- a/homeassistant/components/suez_water/config_flow.py +++ b/homeassistant/components/suez_water/config_flow.py @@ -82,7 +82,10 @@ class SuezWaterConfigFlow(ConfigFlow, domain=DOMAIN): ) return self.async_show_form( - step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + step_id="user", + data_schema=STEP_USER_DATA_SCHEMA, + errors=errors, + description_placeholders={"tout_sur_mon_eau": "Tout sur mon Eau"}, ) diff --git a/homeassistant/components/suez_water/quality_scale.yaml b/homeassistant/components/suez_water/quality_scale.yaml index 0ca4c2e0f27..0980ee472eb 100644 --- a/homeassistant/components/suez_water/quality_scale.yaml +++ b/homeassistant/components/suez_water/quality_scale.yaml @@ -1,9 +1,9 @@ rules: # Bronze - config-flow: todo + config-flow: done test-before-configure: done unique-config-entry: done - config-flow-test-coverage: todo + config-flow-test-coverage: done runtime-data: status: todo comment: coordinator is created during setup, should be stored in runtime_data diff --git a/homeassistant/components/suez_water/strings.json b/homeassistant/components/suez_water/strings.json index 6be2affab97..be2d4849e76 100644 --- a/homeassistant/components/suez_water/strings.json +++ b/homeassistant/components/suez_water/strings.json @@ -5,15 +5,21 @@ "data": { "username": 
"[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "counter_id": "Counter id" - } + "counter_id": "Meter id" + }, + "data_description": { + "username": "Enter your login associated with your {tout_sur_mon_eau} account", + "password": "Enter your password associated with your {tout_sur_mon_eau} account", + "counter_id": "Enter your meter id (ex: 12345678). Should be found automatically during setup, if not see integration documentation for more information" + }, + "description": "Connect your suez water {tout_sur_mon_eau} account to retrieve your water consumption" } }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "unknown": "[%key:common::config_flow::error::unknown%]", - "counter_not_found": "Could not find counter id automatically" + "counter_not_found": "Could not find meter id automatically" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" From 0d71828defe04b03dda3fc5c8995a69452f65318 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 11 Dec 2024 16:11:14 +0100 Subject: [PATCH 073/677] Migrate mqtt lights to use Kelvin (#132828) * Migrate mqtt lights to use Kelvin * Adjust restore_cache tests * Adjust tests --- .../components/mqtt/light/schema_basic.py | 25 +++++++---- .../components/mqtt/light/schema_json.py | 42 +++++++++++++------ .../components/mqtt/light/schema_template.py | 38 +++++++++++------ tests/components/mqtt/test_light.py | 4 +- tests/components/mqtt/test_light_json.py | 6 +-- tests/components/mqtt/test_light_template.py | 4 +- 6 files changed, 80 insertions(+), 39 deletions(-) diff --git a/homeassistant/components/mqtt/light/schema_basic.py b/homeassistant/components/mqtt/light/schema_basic.py index 8a1b7a2a76a..d58d52377dd 100644 --- a/homeassistant/components/mqtt/light/schema_basic.py +++ 
b/homeassistant/components/mqtt/light/schema_basic.py @@ -246,7 +246,7 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): _optimistic: bool _optimistic_brightness: bool _optimistic_color_mode: bool - _optimistic_color_temp: bool + _optimistic_color_temp_kelvin: bool _optimistic_effect: bool _optimistic_hs_color: bool _optimistic_rgb_color: bool @@ -327,7 +327,7 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): and topic[CONF_RGB_STATE_TOPIC] is None ) ) - self._optimistic_color_temp = ( + self._optimistic_color_temp_kelvin = ( optimistic or topic[CONF_COLOR_TEMP_STATE_TOPIC] is None ) self._optimistic_effect = optimistic or topic[CONF_EFFECT_STATE_TOPIC] is None @@ -518,7 +518,9 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): if self._optimistic_color_mode: self._attr_color_mode = ColorMode.COLOR_TEMP - self._attr_color_temp = int(payload) + self._attr_color_temp_kelvin = color_util.color_temperature_mired_to_kelvin( + int(payload) + ) @callback def _effect_received(self, msg: ReceiveMessage) -> None: @@ -592,7 +594,7 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): self.add_subscription( CONF_COLOR_TEMP_STATE_TOPIC, self._color_temp_received, - {"_attr_color_mode", "_attr_color_temp"}, + {"_attr_color_mode", "_attr_color_temp_kelvin"}, ) self.add_subscription( CONF_EFFECT_STATE_TOPIC, self._effect_received, {"_attr_effect"} @@ -631,7 +633,7 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): restore_state(ATTR_RGBW_COLOR) restore_state(ATTR_RGBWW_COLOR) restore_state(ATTR_COLOR_MODE) - restore_state(ATTR_COLOR_TEMP) + restore_state(ATTR_COLOR_TEMP_KELVIN) restore_state(ATTR_EFFECT) restore_state(ATTR_HS_COLOR) restore_state(ATTR_XY_COLOR) @@ -803,14 +805,21 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): await publish(CONF_RGBWW_COMMAND_TOPIC, rgbww_s) should_update |= set_optimistic(ATTR_BRIGHTNESS, kwargs[ATTR_BRIGHTNESS]) if ( - ATTR_COLOR_TEMP in kwargs + ATTR_COLOR_TEMP_KELVIN in kwargs and 
self._topic[CONF_COLOR_TEMP_COMMAND_TOPIC] is not None ): ct_command_tpl = self._command_templates[CONF_COLOR_TEMP_COMMAND_TEMPLATE] - color_temp = ct_command_tpl(int(kwargs[ATTR_COLOR_TEMP]), None) + color_temp = ct_command_tpl( + color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ), + None, + ) await publish(CONF_COLOR_TEMP_COMMAND_TOPIC, color_temp) should_update |= set_optimistic( - ATTR_COLOR_TEMP, kwargs[ATTR_COLOR_TEMP], ColorMode.COLOR_TEMP + ATTR_COLOR_TEMP_KELVIN, + kwargs[ATTR_COLOR_TEMP_KELVIN], + ColorMode.COLOR_TEMP, ) if ( diff --git a/homeassistant/components/mqtt/light/schema_json.py b/homeassistant/components/mqtt/light/schema_json.py index 89f338f6bab..703117190eb 100644 --- a/homeassistant/components/mqtt/light/schema_json.py +++ b/homeassistant/components/mqtt/light/schema_json.py @@ -12,7 +12,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, @@ -273,8 +273,16 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): def _setup_from_config(self, config: ConfigType) -> None: """(Re)Setup the entity.""" - self._attr_max_mireds = config.get(CONF_MAX_MIREDS, super().max_mireds) - self._attr_min_mireds = config.get(CONF_MIN_MIREDS, super().min_mireds) + self._attr_min_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(max_mireds) + if (max_mireds := config.get(CONF_MAX_MIREDS)) + else super().min_color_temp_kelvin + ) + self._attr_max_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(min_mireds) + if (min_mireds := config.get(CONF_MIN_MIREDS)) + else super().max_color_temp_kelvin + ) self._attr_effect_list = config.get(CONF_EFFECT_LIST) self._topic = { @@ -370,7 +378,11 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): return try: if color_mode == ColorMode.COLOR_TEMP: - self._attr_color_temp = int(values["color_temp"]) + 
self._attr_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin( + values["color_temp"] + ) + ) self._attr_color_mode = ColorMode.COLOR_TEMP elif color_mode == ColorMode.HS: hue = float(values["color"]["h"]) @@ -469,9 +481,13 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): # Deprecated color handling try: if values["color_temp"] is None: - self._attr_color_temp = None + self._attr_color_temp_kelvin = None else: - self._attr_color_temp = int(values["color_temp"]) # type: ignore[arg-type] + self._attr_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin( + values["color_temp"] # type: ignore[arg-type] + ) + ) except KeyError: pass except ValueError: @@ -496,7 +512,7 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): self._state_received, { "_attr_brightness", - "_attr_color_temp", + "_attr_color_temp_kelvin", "_attr_effect", "_attr_hs_color", "_attr_is_on", @@ -522,8 +538,8 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): self._attr_color_mode = last_attributes.get( ATTR_COLOR_MODE, self.color_mode ) - self._attr_color_temp = last_attributes.get( - ATTR_COLOR_TEMP, self.color_temp + self._attr_color_temp_kelvin = last_attributes.get( + ATTR_COLOR_TEMP_KELVIN, self.color_temp_kelvin ) self._attr_effect = last_attributes.get(ATTR_EFFECT, self.effect) self._attr_hs_color = last_attributes.get(ATTR_HS_COLOR, self.hs_color) @@ -690,12 +706,14 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): self._attr_brightness = kwargs[ATTR_BRIGHTNESS] should_update = True - if ATTR_COLOR_TEMP in kwargs: - message["color_temp"] = int(kwargs[ATTR_COLOR_TEMP]) + if ATTR_COLOR_TEMP_KELVIN in kwargs: + message["color_temp"] = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) if self._optimistic: self._attr_color_mode = ColorMode.COLOR_TEMP - self._attr_color_temp = kwargs[ATTR_COLOR_TEMP] + self._attr_color_temp_kelvin = kwargs[ATTR_COLOR_TEMP_KELVIN] 
self._attr_hs_color = None should_update = True diff --git a/homeassistant/components/mqtt/light/schema_template.py b/homeassistant/components/mqtt/light/schema_template.py index c4f9cad44c5..7427d25533e 100644 --- a/homeassistant/components/mqtt/light/schema_template.py +++ b/homeassistant/components/mqtt/light/schema_template.py @@ -10,7 +10,7 @@ import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, @@ -126,8 +126,16 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): def _setup_from_config(self, config: ConfigType) -> None: """(Re)Setup the entity.""" - self._attr_max_mireds = config.get(CONF_MAX_MIREDS, super().max_mireds) - self._attr_min_mireds = config.get(CONF_MIN_MIREDS, super().min_mireds) + self._attr_min_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(max_mireds) + if (max_mireds := config.get(CONF_MAX_MIREDS)) + else super().min_color_temp_kelvin + ) + self._attr_max_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(min_mireds) + if (min_mireds := config.get(CONF_MIN_MIREDS)) + else super().max_color_temp_kelvin + ) self._attr_effect_list = config.get(CONF_EFFECT_LIST) self._topics = { @@ -213,8 +221,10 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): color_temp = self._value_templates[CONF_COLOR_TEMP_TEMPLATE]( msg.payload ) - self._attr_color_temp = ( - int(color_temp) if color_temp != "None" else None + self._attr_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(int(color_temp)) + if color_temp != "None" + else None ) except ValueError: _LOGGER.warning("Invalid color temperature value received") @@ -256,7 +266,7 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): { "_attr_brightness", "_attr_color_mode", - "_attr_color_temp", + "_attr_color_temp_kelvin", "_attr_effect", "_attr_hs_color", "_attr_is_on", @@ -275,8 +285,10 @@ 
class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): if last_state.attributes.get(ATTR_HS_COLOR): self._attr_hs_color = last_state.attributes.get(ATTR_HS_COLOR) self._update_color_mode() - if last_state.attributes.get(ATTR_COLOR_TEMP): - self._attr_color_temp = last_state.attributes.get(ATTR_COLOR_TEMP) + if last_state.attributes.get(ATTR_COLOR_TEMP_KELVIN): + self._attr_color_temp_kelvin = last_state.attributes.get( + ATTR_COLOR_TEMP_KELVIN + ) if last_state.attributes.get(ATTR_EFFECT): self._attr_effect = last_state.attributes.get(ATTR_EFFECT) @@ -295,11 +307,13 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): if self._optimistic: self._attr_brightness = kwargs[ATTR_BRIGHTNESS] - if ATTR_COLOR_TEMP in kwargs: - values["color_temp"] = int(kwargs[ATTR_COLOR_TEMP]) + if ATTR_COLOR_TEMP_KELVIN in kwargs: + values["color_temp"] = color_util.color_temperature_kelvin_to_mired( + kwargs[ATTR_COLOR_TEMP_KELVIN] + ) if self._optimistic: - self._attr_color_temp = kwargs[ATTR_COLOR_TEMP] + self._attr_color_temp_kelvin = kwargs[ATTR_COLOR_TEMP_KELVIN] self._attr_hs_color = None self._update_color_mode() @@ -325,7 +339,7 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): values["sat"] = hs_color[1] if self._optimistic: - self._attr_color_temp = None + self._attr_color_temp_kelvin = None self._attr_hs_color = kwargs[ATTR_HS_COLOR] self._update_color_mode() diff --git a/tests/components/mqtt/test_light.py b/tests/components/mqtt/test_light.py index b11484d55fb..8e9e2abb85a 100644 --- a/tests/components/mqtt/test_light.py +++ b/tests/components/mqtt/test_light.py @@ -1008,7 +1008,7 @@ async def test_sending_mqtt_commands_and_optimistic( "brightness": 95, "hs_color": [100, 100], "effect": "random", - "color_temp": 100, + "color_temp_kelvin": 100000, "color_mode": "hs", }, ) @@ -1021,7 +1021,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get("brightness") == 95 assert state.attributes.get("hs_color") 
== (100, 100) assert state.attributes.get("effect") == "random" - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get(light.ATTR_COLOR_MODE) == "hs" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes assert state.attributes.get(ATTR_ASSUMED_STATE) diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index f0da483e706..7d8ff241d3c 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -1053,7 +1053,7 @@ async def test_sending_mqtt_commands_and_optimistic( "brightness": 95, "hs_color": [100, 100], "effect": "random", - "color_temp": 100, + "color_temp_kelvin": 10000, }, ) mock_restore_cache(hass, (fake_state,)) @@ -1065,7 +1065,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get("brightness") == 95 assert state.attributes.get("hs_color") == (100, 100) assert state.attributes.get("effect") == "random" - assert state.attributes.get("color_temp") is None # hs_color has priority + assert state.attributes.get("color_temp_kelvin") is None # hs_color has priority color_modes = [light.ColorMode.COLOR_TEMP, light.ColorMode.HS] assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes expected_features = ( @@ -1205,7 +1205,7 @@ async def test_sending_mqtt_commands_and_optimistic2( "on", { "brightness": 95, - "color_temp": 100, + "color_temp_kelvin": 10000, "color_mode": "rgb", "effect": "random", "hs_color": [100, 100], diff --git a/tests/components/mqtt/test_light_template.py b/tests/components/mqtt/test_light_template.py index 59fd3eb88ed..64cdff370be 100644 --- a/tests/components/mqtt/test_light_template.py +++ b/tests/components/mqtt/test_light_template.py @@ -432,7 +432,7 @@ async def test_sending_mqtt_commands_and_optimistic( "brightness": 95, "hs_color": [100, 100], "effect": "random", - "color_temp": 100, + 
"color_temp_kelvin": 10000, }, ) mock_restore_cache(hass, (fake_state,)) @@ -443,7 +443,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.state == STATE_ON assert state.attributes.get("hs_color") == (100, 100) assert state.attributes.get("effect") == "random" - assert state.attributes.get("color_temp") is None # hs_color has priority + assert state.attributes.get("color_temp_kelvin") is None # hs_color has priority assert state.attributes.get(ATTR_ASSUMED_STATE) await common.async_turn_off(hass, "light.test") From 00ab5db6612ff5b7cf541df2639738f3b7a42473 Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Wed, 11 Dec 2024 16:41:48 +0100 Subject: [PATCH 074/677] Split the velbus services code in its own file (#131375) --- homeassistant/components/velbus/__init__.py | 121 ++---------------- .../components/velbus/quality_scale.yaml | 2 +- homeassistant/components/velbus/services.py | 116 +++++++++++++++++ 3 files changed, 130 insertions(+), 109 deletions(-) create mode 100644 homeassistant/components/velbus/services.py diff --git a/homeassistant/components/velbus/__init__.py b/homeassistant/components/velbus/__init__.py index ca8cfb0f2a7..fec6395c890 100644 --- a/homeassistant/components/velbus/__init__.py +++ b/homeassistant/components/velbus/__init__.py @@ -2,30 +2,22 @@ from __future__ import annotations -from contextlib import suppress import logging import os import shutil from velbusaio.controller import Velbus -import voluptuous as vol from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ADDRESS, CONF_PORT, Platform -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.const import CONF_PORT, Platform +from homeassistant.core import HomeAssistant from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers import config_validation as cv, device_registry as dr from homeassistant.helpers.storage import STORAGE_DIR +from homeassistant.helpers.typing import 
ConfigType -from .const import ( - CONF_INTERFACE, - CONF_MEMO_TEXT, - DOMAIN, - SERVICE_CLEAR_CACHE, - SERVICE_SCAN, - SERVICE_SET_MEMO_TEXT, - SERVICE_SYNC, -) +from .const import DOMAIN +from .services import setup_services _LOGGER = logging.getLogger(__name__) @@ -40,6 +32,8 @@ PLATFORMS = [ Platform.SWITCH, ] +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + async def velbus_connect_task( controller: Velbus, hass: HomeAssistant, entry_id: str @@ -67,6 +61,12 @@ def _migrate_device_identifiers(hass: HomeAssistant, entry_id: str) -> None: dev_reg.async_update_device(device.id, new_identifiers=new_identifier) +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the actions for the Velbus component.""" + setup_services(hass) + return True + + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Establish connection with velbus.""" hass.data.setdefault(DOMAIN, {}) @@ -85,97 +85,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - if hass.services.has_service(DOMAIN, SERVICE_SCAN): - return True - - def check_entry_id(interface: str) -> str: - for config_entry in hass.config_entries.async_entries(DOMAIN): - if "port" in config_entry.data and config_entry.data["port"] == interface: - return config_entry.entry_id - raise vol.Invalid( - "The interface provided is not defined as a port in a Velbus integration" - ) - - async def scan(call: ServiceCall) -> None: - await hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"].scan() - - hass.services.async_register( - DOMAIN, - SERVICE_SCAN, - scan, - vol.Schema({vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id)}), - ) - - async def syn_clock(call: ServiceCall) -> None: - await hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"].sync_clock() - - hass.services.async_register( - DOMAIN, - SERVICE_SYNC, - syn_clock, - 
vol.Schema({vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id)}), - ) - - async def set_memo_text(call: ServiceCall) -> None: - """Handle Memo Text service call.""" - memo_text = call.data[CONF_MEMO_TEXT] - await ( - hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"] - .get_module(call.data[CONF_ADDRESS]) - .set_memo_text(memo_text) - ) - - hass.services.async_register( - DOMAIN, - SERVICE_SET_MEMO_TEXT, - set_memo_text, - vol.Schema( - { - vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id), - vol.Required(CONF_ADDRESS): vol.All( - vol.Coerce(int), vol.Range(min=0, max=255) - ), - vol.Optional(CONF_MEMO_TEXT, default=""): cv.string, - } - ), - ) - - async def clear_cache(call: ServiceCall) -> None: - """Handle a clear cache service call.""" - # clear the cache - with suppress(FileNotFoundError): - if call.data.get(CONF_ADDRESS): - await hass.async_add_executor_job( - os.unlink, - hass.config.path( - STORAGE_DIR, - f"velbuscache-{call.data[CONF_INTERFACE]}/{call.data[CONF_ADDRESS]}.p", - ), - ) - else: - await hass.async_add_executor_job( - shutil.rmtree, - hass.config.path( - STORAGE_DIR, f"velbuscache-{call.data[CONF_INTERFACE]}/" - ), - ) - # call a scan to repopulate - await scan(call) - - hass.services.async_register( - DOMAIN, - SERVICE_CLEAR_CACHE, - clear_cache, - vol.Schema( - { - vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id), - vol.Optional(CONF_ADDRESS): vol.All( - vol.Coerce(int), vol.Range(min=0, max=255) - ), - } - ), - ) - return True @@ -186,10 +95,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data[DOMAIN].pop(entry.entry_id) if not hass.data[DOMAIN]: hass.data.pop(DOMAIN) - hass.services.async_remove(DOMAIN, SERVICE_SCAN) - hass.services.async_remove(DOMAIN, SERVICE_SYNC) - hass.services.async_remove(DOMAIN, SERVICE_SET_MEMO_TEXT) - hass.services.async_remove(DOMAIN, SERVICE_CLEAR_CACHE) return unload_ok diff --git 
a/homeassistant/components/velbus/quality_scale.yaml b/homeassistant/components/velbus/quality_scale.yaml index f3ab8f607b6..adea896a1c6 100644 --- a/homeassistant/components/velbus/quality_scale.yaml +++ b/homeassistant/components/velbus/quality_scale.yaml @@ -1,6 +1,6 @@ rules: # Bronze - action-setup: todo + action-setup: done appropriate-polling: status: exempt comment: | diff --git a/homeassistant/components/velbus/services.py b/homeassistant/components/velbus/services.py new file mode 100644 index 00000000000..83633eb66bc --- /dev/null +++ b/homeassistant/components/velbus/services.py @@ -0,0 +1,116 @@ +"""Support for Velbus devices.""" + +from __future__ import annotations + +from contextlib import suppress +import os +import shutil + +import voluptuous as vol + +from homeassistant.const import CONF_ADDRESS +from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.storage import STORAGE_DIR + +from .const import ( + CONF_INTERFACE, + CONF_MEMO_TEXT, + DOMAIN, + SERVICE_CLEAR_CACHE, + SERVICE_SCAN, + SERVICE_SET_MEMO_TEXT, + SERVICE_SYNC, +) + + +def setup_services(hass: HomeAssistant) -> None: + """Register the velbus services.""" + + def check_entry_id(interface: str) -> str: + for config_entry in hass.config_entries.async_entries(DOMAIN): + if "port" in config_entry.data and config_entry.data["port"] == interface: + return config_entry.entry_id + raise vol.Invalid( + "The interface provided is not defined as a port in a Velbus integration" + ) + + async def scan(call: ServiceCall) -> None: + await hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"].scan() + + async def syn_clock(call: ServiceCall) -> None: + await hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"].sync_clock() + + async def set_memo_text(call: ServiceCall) -> None: + """Handle Memo Text service call.""" + memo_text = call.data[CONF_MEMO_TEXT] + await ( + 
hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"] + .get_module(call.data[CONF_ADDRESS]) + .set_memo_text(memo_text.async_render()) + ) + + async def clear_cache(call: ServiceCall) -> None: + """Handle a clear cache service call.""" + # clear the cache + with suppress(FileNotFoundError): + if call.data.get(CONF_ADDRESS): + await hass.async_add_executor_job( + os.unlink, + hass.config.path( + STORAGE_DIR, + f"velbuscache-{call.data[CONF_INTERFACE]}/{call.data[CONF_ADDRESS]}.p", + ), + ) + else: + await hass.async_add_executor_job( + shutil.rmtree, + hass.config.path( + STORAGE_DIR, f"velbuscache-{call.data[CONF_INTERFACE]}/" + ), + ) + # call a scan to repopulate + await scan(call) + + hass.services.async_register( + DOMAIN, + SERVICE_SCAN, + scan, + vol.Schema({vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id)}), + ) + + hass.services.async_register( + DOMAIN, + SERVICE_SYNC, + syn_clock, + vol.Schema({vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id)}), + ) + + hass.services.async_register( + DOMAIN, + SERVICE_SET_MEMO_TEXT, + set_memo_text, + vol.Schema( + { + vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id), + vol.Required(CONF_ADDRESS): vol.All( + vol.Coerce(int), vol.Range(min=0, max=255) + ), + vol.Optional(CONF_MEMO_TEXT, default=""): cv.template, + } + ), + ) + + hass.services.async_register( + DOMAIN, + SERVICE_CLEAR_CACHE, + clear_cache, + vol.Schema( + { + vol.Required(CONF_INTERFACE): vol.All(cv.string, check_entry_id), + vol.Optional(CONF_ADDRESS): vol.All( + vol.Coerce(int), vol.Range(min=0, max=255) + ), + } + ), + ) From 39f8de015910ae6ef0b4d224802435d22b2b008e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 11 Dec 2024 17:18:54 +0100 Subject: [PATCH 075/677] Fix mqtt light attributes (#132941) --- homeassistant/components/mqtt/light/schema_basic.py | 12 ++++++++++-- homeassistant/components/mqtt/light/schema_json.py | 2 +- 2 files changed, 11 insertions(+), 
3 deletions(-) diff --git a/homeassistant/components/mqtt/light/schema_basic.py b/homeassistant/components/mqtt/light/schema_basic.py index d58d52377dd..a4d3ecb5f21 100644 --- a/homeassistant/components/mqtt/light/schema_basic.py +++ b/homeassistant/components/mqtt/light/schema_basic.py @@ -261,8 +261,16 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): def _setup_from_config(self, config: ConfigType) -> None: """(Re)Setup the entity.""" - self._attr_min_mireds = config.get(CONF_MIN_MIREDS, super().min_mireds) - self._attr_max_mireds = config.get(CONF_MAX_MIREDS, super().max_mireds) + self._attr_min_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(max_mireds) + if (max_mireds := config.get(CONF_MAX_MIREDS)) + else super().min_color_temp_kelvin + ) + self._attr_max_color_temp_kelvin = ( + color_util.color_temperature_mired_to_kelvin(min_mireds) + if (min_mireds := config.get(CONF_MIN_MIREDS)) + else super().max_color_temp_kelvin + ) self._attr_effect_list = config.get(CONF_EFFECT_LIST) topic: dict[str, str | None] = { diff --git a/homeassistant/components/mqtt/light/schema_json.py b/homeassistant/components/mqtt/light/schema_json.py index 703117190eb..5901967610a 100644 --- a/homeassistant/components/mqtt/light/schema_json.py +++ b/homeassistant/components/mqtt/light/schema_json.py @@ -639,7 +639,7 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): message["color"]["s"] = hs_color[1] if self._optimistic: - self._attr_color_temp = None + self._attr_color_temp_kelvin = None self._attr_hs_color = kwargs[ATTR_HS_COLOR] should_update = True From 502a221feb345ce434e265be5dcfb44176828950 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Wed, 11 Dec 2024 17:20:49 +0100 Subject: [PATCH 076/677] Set go2rtc quality scale to internal (#132945) --- homeassistant/components/go2rtc/manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/go2rtc/manifest.json 
b/homeassistant/components/go2rtc/manifest.json index 1cd9e8c1107..07dbd3bd29b 100644 --- a/homeassistant/components/go2rtc/manifest.json +++ b/homeassistant/components/go2rtc/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/go2rtc", "integration_type": "system", "iot_class": "local_polling", - "quality_scale": "legacy", + "quality_scale": "internal", "requirements": ["go2rtc-client==0.1.2"], "single_config_entry": true } From 94260147d757a7f70ce94f685b952cc66794dc99 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Wed, 11 Dec 2024 11:52:02 -0600 Subject: [PATCH 077/677] Fix pipeline conversation language (#132896) --- .../components/assist_pipeline/pipeline.py | 12 ++- .../assist_pipeline/snapshots/test_init.ambr | 55 +++++++++++++- tests/components/assist_pipeline/test_init.py | 75 +++++++++++++++++++ .../conversation/test_default_agent.py | 47 ++++++++++++ 4 files changed, 185 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index 9e9e84fb5d6..f8f6be3a40f 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -29,6 +29,7 @@ from homeassistant.components import ( from homeassistant.components.tts import ( generate_media_source_id as tts_generate_media_source_id, ) +from homeassistant.const import MATCH_ALL from homeassistant.core import Context, HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import intent @@ -1009,12 +1010,19 @@ class PipelineRun: if self.intent_agent is None: raise RuntimeError("Recognize intent was not prepared") + if self.pipeline.conversation_language == MATCH_ALL: + # LLMs support all languages ('*') so use pipeline language for + # intent fallback. 
+ input_language = self.pipeline.language + else: + input_language = self.pipeline.conversation_language + self.process_event( PipelineEvent( PipelineEventType.INTENT_START, { "engine": self.intent_agent, - "language": self.pipeline.conversation_language, + "language": input_language, "intent_input": intent_input, "conversation_id": conversation_id, "device_id": device_id, @@ -1029,7 +1037,7 @@ class PipelineRun: context=self.context, conversation_id=conversation_id, device_id=device_id, - language=self.pipeline.language, + language=input_language, agent_id=self.intent_agent, ) processed_locally = self.intent_agent == conversation.HOME_ASSISTANT_AGENT diff --git a/tests/components/assist_pipeline/snapshots/test_init.ambr b/tests/components/assist_pipeline/snapshots/test_init.ambr index 3b829e0e14a..d3241b8ac1f 100644 --- a/tests/components/assist_pipeline/snapshots/test_init.ambr +++ b/tests/components/assist_pipeline/snapshots/test_init.ambr @@ -142,7 +142,7 @@ 'data': dict({ 'code': 'no_intent_match', }), - 'language': 'en', + 'language': 'en-US', 'response_type': 'error', 'speech': dict({ 'plain': dict({ @@ -233,7 +233,7 @@ 'data': dict({ 'code': 'no_intent_match', }), - 'language': 'en', + 'language': 'en-US', 'response_type': 'error', 'speech': dict({ 'plain': dict({ @@ -387,6 +387,57 @@ }), ]) # --- +# name: test_pipeline_language_used_instead_of_conversation_language + list([ + dict({ + 'data': dict({ + 'language': 'en', + 'pipeline': , + }), + 'type': , + }), + dict({ + 'data': dict({ + 'conversation_id': None, + 'device_id': None, + 'engine': 'conversation.home_assistant', + 'intent_input': 'test input', + 'language': 'en', + 'prefer_local_intents': False, + }), + 'type': , + }), + dict({ + 'data': dict({ + 'intent_output': dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 
'speech': dict({ + }), + }), + }), + 'processed_locally': True, + }), + 'type': , + }), + dict({ + 'data': None, + 'type': , + }), + ]) +# --- # name: test_wake_word_detection_aborted list([ dict({ diff --git a/tests/components/assist_pipeline/test_init.py b/tests/components/assist_pipeline/test_init.py index b177530219e..a3e65766c34 100644 --- a/tests/components/assist_pipeline/test_init.py +++ b/tests/components/assist_pipeline/test_init.py @@ -23,6 +23,7 @@ from homeassistant.components.assist_pipeline.const import ( CONF_DEBUG_RECORDING_DIR, DOMAIN, ) +from homeassistant.const import MATCH_ALL from homeassistant.core import Context, HomeAssistant from homeassistant.helpers import intent from homeassistant.setup import async_setup_component @@ -1098,3 +1099,77 @@ async def test_prefer_local_intents( ] == "Order confirmed" ) + + +async def test_pipeline_language_used_instead_of_conversation_language( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + init_components, + snapshot: SnapshotAssertion, +) -> None: + """Test that the pipeline language is used when the conversation language is '*' (all languages).""" + client = await hass_ws_client(hass) + + events: list[assist_pipeline.PipelineEvent] = [] + + await client.send_json_auto_id( + { + "type": "assist_pipeline/pipeline/create", + "conversation_engine": "homeassistant", + "conversation_language": MATCH_ALL, + "language": "en", + "name": "test_name", + "stt_engine": "test", + "stt_language": "en-US", + "tts_engine": "test", + "tts_language": "en-US", + "tts_voice": "Arnold Schwarzenegger", + "wake_word_entity": None, + "wake_word_id": None, + } + ) + msg = await client.receive_json() + assert msg["success"] + pipeline_id = msg["result"]["id"] + pipeline = assist_pipeline.async_get_pipeline(hass, pipeline_id) + + pipeline_input = assist_pipeline.pipeline.PipelineInput( + intent_input="test input", + run=assist_pipeline.pipeline.PipelineRun( + hass, + context=Context(), + pipeline=pipeline, + 
start_stage=assist_pipeline.PipelineStage.INTENT, + end_stage=assist_pipeline.PipelineStage.INTENT, + event_callback=events.append, + ), + ) + await pipeline_input.validate() + + with patch( + "homeassistant.components.assist_pipeline.pipeline.conversation.async_converse", + return_value=conversation.ConversationResult( + intent.IntentResponse(pipeline.language) + ), + ) as mock_async_converse: + await pipeline_input.execute() + + # Check intent start event + assert process_events(events) == snapshot + intent_start: assist_pipeline.PipelineEvent | None = None + for event in events: + if event.type == assist_pipeline.PipelineEventType.INTENT_START: + intent_start = event + break + + assert intent_start is not None + + # Pipeline language (en) should be used instead of '*' + assert intent_start.data.get("language") == pipeline.language + + # Check input to async_converse + mock_async_converse.assert_called_once() + assert ( + mock_async_converse.call_args_list[0].kwargs.get("language") + == pipeline.language + ) diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index 58d2b0d48bf..8df1647d18c 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -30,6 +30,7 @@ from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME, STATE_CLOSED, + STATE_OFF, STATE_ON, STATE_UNKNOWN, EntityCategory, @@ -3049,3 +3050,49 @@ async def test_entities_names_are_not_templates(hass: HomeAssistant) -> None: assert result is not None assert result.response.response_type == intent.IntentResponseType.ERROR + + +@pytest.mark.parametrize( + ("language", "light_name", "on_sentence", "off_sentence"), + [ + ("en", "test light", "turn on test light", "turn off test light"), + ("zh-cn", "卧室灯", "打开卧室灯", "关闭卧室灯"), + ("zh-hk", "睡房燈", "打開睡房燈", "關閉睡房燈"), + ("zh-tw", "臥室檯燈", "打開臥室檯燈", "關臥室檯燈"), + ], +) +@pytest.mark.usefixtures("init_components") +async 
def test_turn_on_off( + hass: HomeAssistant, + language: str, + light_name: str, + on_sentence: str, + off_sentence: str, +) -> None: + """Test turn on/off in multiple languages.""" + entity_id = "light.light1234" + hass.states.async_set( + entity_id, STATE_OFF, attributes={ATTR_FRIENDLY_NAME: light_name} + ) + + on_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + await conversation.async_converse( + hass, + on_sentence, + None, + Context(), + language=language, + ) + assert len(on_calls) == 1 + assert on_calls[0].data.get("entity_id") == [entity_id] + + off_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_off") + await conversation.async_converse( + hass, + off_sentence, + None, + Context(), + language=language, + ) + assert len(off_calls) == 1 + assert off_calls[0].data.get("entity_id") == [entity_id] From 233d927c01656956a868b483de0183c7c3761f66 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Wed, 11 Dec 2024 18:56:21 +0100 Subject: [PATCH 078/677] Update xknx to 3.4.0 (#132943) --- homeassistant/components/knx/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index aed7f3ed455..55c19443aa0 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -10,7 +10,7 @@ "iot_class": "local_push", "loggers": ["xknx", "xknxproject"], "requirements": [ - "xknx==3.3.0", + "xknx==3.4.0", "xknxproject==3.8.1", "knx-frontend==2024.11.16.205004" ], diff --git a/requirements_all.txt b/requirements_all.txt index b263779e67f..e039a6b486b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3026,7 +3026,7 @@ xbox-webapi==2.1.0 xiaomi-ble==0.33.0 # homeassistant.components.knx -xknx==3.3.0 +xknx==3.4.0 # homeassistant.components.knx xknxproject==3.8.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 
d641a0fa4e2..f67bee3f32f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2421,7 +2421,7 @@ xbox-webapi==2.1.0 xiaomi-ble==0.33.0 # homeassistant.components.knx -xknx==3.3.0 +xknx==3.4.0 # homeassistant.components.knx xknxproject==3.8.1 From 3a7fc15656f85d1a6577976482a9e45c0c61a2a2 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Wed, 11 Dec 2024 19:01:20 +0100 Subject: [PATCH 079/677] Add Dutch locale on supported Alexa interfaces (#132936) --- .../components/alexa/capabilities.py | 19 +++++++++++++++++++ homeassistant/components/alexa/const.py | 1 + homeassistant/components/alexa/handlers.py | 1 + 3 files changed, 21 insertions(+) diff --git a/homeassistant/components/alexa/capabilities.py b/homeassistant/components/alexa/capabilities.py index 8672512acde..c5b4ad15904 100644 --- a/homeassistant/components/alexa/capabilities.py +++ b/homeassistant/components/alexa/capabilities.py @@ -317,6 +317,7 @@ class Alexa(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -403,6 +404,7 @@ class AlexaPowerController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -469,6 +471,7 @@ class AlexaLockController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -523,6 +526,7 @@ class AlexaSceneController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -562,6 +566,7 @@ class AlexaBrightnessController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -611,6 +616,7 @@ class AlexaColorController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -669,6 +675,7 @@ class AlexaColorTemperatureController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -715,6 +722,7 @@ class AlexaSpeaker(AlexaCapability): "fr-FR", # Not documented as of 2021-12-04, see PR #60489 "it-IT", "ja-JP", + "nl-NL", } def name(self) -> str: @@ -772,6 +780,7 @@ class AlexaStepSpeaker(AlexaCapability): "es-ES", "fr-FR", # Not documented as of 
2021-12-04, see PR #60489 "it-IT", + "nl-NL", } def name(self) -> str: @@ -801,6 +810,7 @@ class AlexaPlaybackController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -859,6 +869,7 @@ class AlexaInputController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -1104,6 +1115,7 @@ class AlexaThermostatController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -1245,6 +1257,7 @@ class AlexaPowerLevelController(AlexaCapability): "fr-CA", "fr-FR", "it-IT", + "nl-NL", "ja-JP", } @@ -1723,6 +1736,7 @@ class AlexaRangeController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -2066,6 +2080,7 @@ class AlexaToggleController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -2212,6 +2227,7 @@ class AlexaPlaybackStateReporter(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -2267,6 +2283,7 @@ class AlexaSeekController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -2360,6 +2377,7 @@ class AlexaEqualizerController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } @@ -2470,6 +2488,7 @@ class AlexaCameraStreamController(AlexaCapability): "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", } diff --git a/homeassistant/components/alexa/const.py b/homeassistant/components/alexa/const.py index 4862e4d8a8c..27e9bbd5b67 100644 --- a/homeassistant/components/alexa/const.py +++ b/homeassistant/components/alexa/const.py @@ -59,6 +59,7 @@ CONF_SUPPORTED_LOCALES = ( "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", ) diff --git a/homeassistant/components/alexa/handlers.py b/homeassistant/components/alexa/handlers.py index 21365076def..9b857ff4dfd 100644 --- a/homeassistant/components/alexa/handlers.py +++ b/homeassistant/components/alexa/handlers.py @@ -527,6 +527,7 @@ async def async_api_unlock( "hi-IN", "it-IT", "ja-JP", + "nl-NL", "pt-BR", }: msg = ( From 096d653059b2c38ed4c90452c4ecf9b61daf2023 Mon Sep 17 00:00:00 2001 From: 
Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Wed, 11 Dec 2024 13:03:43 -0500 Subject: [PATCH 080/677] Record current IQS state for Russound RIO (#131219) --- .../russound_rio/quality_scale.yaml | 88 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 88 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/russound_rio/quality_scale.yaml diff --git a/homeassistant/components/russound_rio/quality_scale.yaml b/homeassistant/components/russound_rio/quality_scale.yaml new file mode 100644 index 00000000000..603485705a3 --- /dev/null +++ b/homeassistant/components/russound_rio/quality_scale.yaml @@ -0,0 +1,88 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: + status: exempt + comment: | + This integration uses a push API. No polling required. + brands: done + common-modules: done + config-flow-test-coverage: + status: todo + comment: | + Missing unique_id check in test_form() and test_import(). Test for adding same device twice missing. + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: done + docs-installation-instructions: todo + docs-removal-instructions: todo + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: + status: todo + comment: Can use RussoundConfigEntry in async_unload_entry + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: done + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. 
+ parallel-updates: todo + test-coverage: todo + integration-owner: done + docs-installation-parameters: todo + docs-configuration-parameters: + status: exempt + comment: | + This integration does not have an options flow. + # Gold + entity-translations: + status: exempt + comment: | + There are no entities to translate. + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: + status: exempt + comment: | + This integration doesn't have enough / noisy entities that warrant being disabled by default. + discovery: todo + stale-devices: todo + diagnostics: done + exception-translations: done + icon-translations: todo + reconfiguration-flow: todo + dynamic-devices: todo + discovery-update-info: todo + repair-issues: done + docs-use-cases: todo + docs-supported-devices: done + docs-supported-functions: todo + docs-data-update: todo + docs-known-limitations: todo + docs-troubleshooting: todo + docs-examples: todo + + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: | + This integration uses telnet exclusively and does not make http calls. 
+ strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index aa62b5a5120..a69311672da 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -890,7 +890,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "rtorrent", "rtsp_to_webrtc", "ruckus_unleashed", - "russound_rio", "russound_rnet", "ruuvi_gateway", "ruuvitag_ble", From fa05cc5e70df31f20d9a46a7c398b0b01db1b2de Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Wed, 11 Dec 2024 10:04:16 -0800 Subject: [PATCH 081/677] Add quality scale for nest integration (#131330) Co-authored-by: Joost Lekkerkerker Co-authored-by: Franck Nijhof --- .../components/nest/quality_scale.yaml | 86 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 86 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/nest/quality_scale.yaml diff --git a/homeassistant/components/nest/quality_scale.yaml b/homeassistant/components/nest/quality_scale.yaml new file mode 100644 index 00000000000..969ee66059d --- /dev/null +++ b/homeassistant/components/nest/quality_scale.yaml @@ -0,0 +1,86 @@ +rules: + # Bronze + config-flow: + status: todo + comment: Some fields are missing a data_description + brands: done + dependency-transparency: done + common-modules: + status: exempt + comment: The integration does not have a base entity or coordinator. + has-entity-name: done + action-setup: + status: exempt + comment: The integration does not register actions. + appropriate-polling: + status: exempt + comment: The integration does not poll. + test-before-configure: + status: todo + comment: | + The integration does a connection test in the configuration flow, however + it does not fail if the user has ipv6 misconfigured. 
+ entity-event-setup: done + unique-config-entry: done + entity-unique-id: done + docs-installation-instructions: done + docs-removal-instructions: todo + test-before-setup: + status: todo + comment: | + The integration does tests on setup, however the most common issues + observed are related to ipv6 misconfigurations and the error messages + are not self explanatory and can be improved. + docs-high-level-description: done + config-flow-test-coverage: + status: todo + comment: | + The integration has full test coverage however it does not yet assert the specific contents of the + unique id of the created entry. Additional tests coverage for combinations of features like + `test_dhcp_discovery_with_creds` would also be useful. + Tests can be improved so that all end in either CREATE_ENTRY or ABORT. + docs-actions: done + runtime-data: done + + # Silver + log-when-unavailable: todo + config-entry-unloading: todo + reauthentication-flow: + status: todo + comment: | + Supports reauthentication, however can be improved to ensure the user does not change accounts + action-exceptions: todo + docs-installation-parameters: todo + integration-owner: todo + parallel-updates: todo + test-coverage: todo + docs-configuration-parameters: todo + entity-unavailable: todo + + # Gold + docs-examples: todo + discovery-update-info: todo + entity-device-class: todo + entity-translations: todo + docs-data-update: todo + entity-disabled-by-default: todo + discovery: todo + exception-translations: todo + devices: todo + docs-supported-devices: todo + icon-translations: todo + docs-known-limitations: todo + stale-devices: todo + docs-supported-functions: todo + repair-issues: todo + reconfiguration-flow: todo + entity-category: todo + dynamic-devices: todo + docs-troubleshooting: todo + diagnostics: todo + docs-use-cases: todo + + # Platinum + async-dependency: todo + strict-typing: todo + inject-websession: todo diff --git a/script/hassfest/quality_scale.py 
b/script/hassfest/quality_scale.py index a69311672da..49f05b78a16 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -707,7 +707,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "neato", "nederlandse_spoorwegen", "ness_alarm", - "nest", "netatmo", "netdata", "netgear", From 0e8fe1eb41252b0241d9cc16e0bc8247bb842c3c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 11 Dec 2024 19:15:36 +0100 Subject: [PATCH 082/677] Improve coverage in light reproduce state (#132929) --- .../components/light/test_reproduce_state.py | 48 +++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/tests/components/light/test_reproduce_state.py b/tests/components/light/test_reproduce_state.py index aa698129915..30a5e3f6842 100644 --- a/tests/components/light/test_reproduce_state.py +++ b/tests/components/light/test_reproduce_state.py @@ -193,6 +193,54 @@ async def test_filter_color_modes( assert len(turn_on_calls) == 1 +async def test_filter_color_modes_missing_attributes( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test warning on missing attribute when filtering for color mode.""" + color_mode = light.ColorMode.COLOR_TEMP + hass.states.async_set("light.entity", "off", {}) + expected_log = ( + "Color mode color_temp specified " + "but attribute color_temp missing for: light.entity" + ) + + turn_on_calls = async_mock_service(hass, "light", "turn_on") + + all_colors = { + **VALID_COLOR_TEMP, + **VALID_HS_COLOR, + **VALID_RGB_COLOR, + **VALID_RGBW_COLOR, + **VALID_RGBWW_COLOR, + **VALID_XY_COLOR, + **VALID_BRIGHTNESS, + } + + # Test missing `color_temp` attribute + stored_attributes = {**all_colors} + stored_attributes.pop("color_temp") + caplog.clear() + await async_reproduce_state( + hass, + [State("light.entity", "on", {**stored_attributes, "color_mode": color_mode})], + ) + assert len(turn_on_calls) == 0 + assert expected_log in caplog.text + + # Test with correct 
`color_temp` attribute + stored_attributes["color_temp"] = 240 + expected = {"brightness": 180, "color_temp": 240} + caplog.clear() + await async_reproduce_state( + hass, + [State("light.entity", "on", {**all_colors, "color_mode": color_mode})], + ) + assert len(turn_on_calls) == 1 + assert turn_on_calls[0].domain == "light" + assert dict(turn_on_calls[0].data) == {"entity_id": "light.entity", **expected} + assert expected_log not in caplog.text + + @pytest.mark.parametrize( "saved_state", [ From 833557fad5a136dc83b49e350b7999891eccb043 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Wed, 11 Dec 2024 19:16:49 +0100 Subject: [PATCH 083/677] Trigger full ci run on global mypy config change (#132909) --- .core_files.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.core_files.yaml b/.core_files.yaml index 6fd3a74df92..cc99487f68d 100644 --- a/.core_files.yaml +++ b/.core_files.yaml @@ -6,6 +6,7 @@ core: &core - homeassistant/helpers/** - homeassistant/package_constraints.txt - homeassistant/util/** + - mypy.ini - pyproject.toml - requirements.txt - setup.cfg From 73e68971e80a07d2a5b11a5540486228037d5148 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 11 Dec 2024 20:48:55 +0100 Subject: [PATCH 084/677] Remove port from Elgato configuration flow (#132961) --- homeassistant/components/elgato/config_flow.py | 9 ++------- homeassistant/components/elgato/coordinator.py | 3 +-- homeassistant/components/elgato/quality_scale.yaml | 5 +---- homeassistant/components/elgato/strings.json | 3 +-- tests/components/elgato/conftest.py | 3 +-- tests/components/elgato/snapshots/test_config_flow.ambr | 6 ------ tests/components/elgato/test_config_flow.py | 8 ++++---- 7 files changed, 10 insertions(+), 27 deletions(-) diff --git a/homeassistant/components/elgato/config_flow.py b/homeassistant/components/elgato/config_flow.py index 5329fcee90a..e20afc73a2d 100644 --- a/homeassistant/components/elgato/config_flow.py +++ 
b/homeassistant/components/elgato/config_flow.py @@ -9,7 +9,7 @@ import voluptuous as vol from homeassistant.components import onboarding, zeroconf from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PORT +from homeassistant.const import CONF_HOST, CONF_MAC from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -34,7 +34,6 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN): return self._async_show_setup_form() self.host = user_input[CONF_HOST] - self.port = user_input[CONF_PORT] try: await self._get_elgato_serial_number(raise_on_progress=False) @@ -49,7 +48,6 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN): """Handle zeroconf discovery.""" self.host = discovery_info.host self.mac = discovery_info.properties.get("id") - self.port = discovery_info.port or 9123 try: await self._get_elgato_serial_number() @@ -81,7 +79,6 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN): data_schema=vol.Schema( { vol.Required(CONF_HOST): str, - vol.Optional(CONF_PORT, default=9123): int, } ), errors=errors or {}, @@ -93,7 +90,6 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN): title=self.serial_number, data={ CONF_HOST: self.host, - CONF_PORT: self.port, CONF_MAC: self.mac, }, ) @@ -103,7 +99,6 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN): session = async_get_clientsession(self.hass) elgato = Elgato( host=self.host, - port=self.port, session=session, ) info = await elgato.info() @@ -113,7 +108,7 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN): info.serial_number, raise_on_progress=raise_on_progress ) self._abort_if_unique_id_configured( - updates={CONF_HOST: self.host, CONF_PORT: self.port, CONF_MAC: self.mac} + updates={CONF_HOST: self.host, CONF_MAC: self.mac} ) self.serial_number = info.serial_number diff --git a/homeassistant/components/elgato/coordinator.py b/homeassistant/components/elgato/coordinator.py index 
c2bc79491a1..f3cf9216374 100644 --- a/homeassistant/components/elgato/coordinator.py +++ b/homeassistant/components/elgato/coordinator.py @@ -5,7 +5,7 @@ from dataclasses import dataclass from elgato import BatteryInfo, Elgato, ElgatoConnectionError, Info, Settings, State from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -34,7 +34,6 @@ class ElgatoDataUpdateCoordinator(DataUpdateCoordinator[ElgatoData]): self.config_entry = entry self.client = Elgato( entry.data[CONF_HOST], - port=entry.data[CONF_PORT], session=async_get_clientsession(hass), ) super().__init__( diff --git a/homeassistant/components/elgato/quality_scale.yaml b/homeassistant/components/elgato/quality_scale.yaml index 301d00931d2..513940e2438 100644 --- a/homeassistant/components/elgato/quality_scale.yaml +++ b/homeassistant/components/elgato/quality_scale.yaml @@ -5,10 +5,7 @@ rules: brands: done common-modules: done config-flow-test-coverage: done - config-flow: - status: todo - comment: | - The data_description for port is missing. 
+ config-flow: done dependency-transparency: done docs-actions: done docs-high-level-description: done diff --git a/homeassistant/components/elgato/strings.json b/homeassistant/components/elgato/strings.json index 6e1031c8ddf..727b8ee7024 100644 --- a/homeassistant/components/elgato/strings.json +++ b/homeassistant/components/elgato/strings.json @@ -5,8 +5,7 @@ "user": { "description": "Set up your Elgato Light to integrate with Home Assistant.", "data": { - "host": "[%key:common::config_flow::data::host%]", - "port": "[%key:common::config_flow::data::port%]" + "host": "[%key:common::config_flow::data::host%]" }, "data_description": { "host": "The hostname or IP address of your Elgato device." diff --git a/tests/components/elgato/conftest.py b/tests/components/elgato/conftest.py index 73b09421576..afa89f8eb27 100644 --- a/tests/components/elgato/conftest.py +++ b/tests/components/elgato/conftest.py @@ -7,7 +7,7 @@ from elgato import BatteryInfo, ElgatoNoBatteryError, Info, Settings, State import pytest from homeassistant.components.elgato.const import DOMAIN -from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PORT +from homeassistant.const import CONF_HOST, CONF_MAC from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, get_fixture_path, load_fixture @@ -35,7 +35,6 @@ def mock_config_entry() -> MockConfigEntry: data={ CONF_HOST: "127.0.0.1", CONF_MAC: "AA:BB:CC:DD:EE:FF", - CONF_PORT: 9123, }, unique_id="CN11A1A00001", ) diff --git a/tests/components/elgato/snapshots/test_config_flow.ambr b/tests/components/elgato/snapshots/test_config_flow.ambr index d5d005cff9c..522482ab602 100644 --- a/tests/components/elgato/snapshots/test_config_flow.ambr +++ b/tests/components/elgato/snapshots/test_config_flow.ambr @@ -8,7 +8,6 @@ 'data': dict({ 'host': '127.0.0.1', 'mac': None, - 'port': 9123, }), 'description': None, 'description_placeholders': None, @@ -21,7 +20,6 @@ 'data': dict({ 'host': '127.0.0.1', 'mac': None, - 'port': 9123, }), 
'disabled_by': None, 'discovery_keys': dict({ @@ -53,7 +51,6 @@ 'data': dict({ 'host': '127.0.0.1', 'mac': 'AA:BB:CC:DD:EE:FF', - 'port': 9123, }), 'description': None, 'description_placeholders': None, @@ -66,7 +63,6 @@ 'data': dict({ 'host': '127.0.0.1', 'mac': 'AA:BB:CC:DD:EE:FF', - 'port': 9123, }), 'disabled_by': None, 'discovery_keys': dict({ @@ -97,7 +93,6 @@ 'data': dict({ 'host': '127.0.0.1', 'mac': 'AA:BB:CC:DD:EE:FF', - 'port': 9123, }), 'description': None, 'description_placeholders': None, @@ -110,7 +105,6 @@ 'data': dict({ 'host': '127.0.0.1', 'mac': 'AA:BB:CC:DD:EE:FF', - 'port': 9123, }), 'disabled_by': None, 'discovery_keys': dict({ diff --git a/tests/components/elgato/test_config_flow.py b/tests/components/elgato/test_config_flow.py index 6da99241b64..42abc0cde63 100644 --- a/tests/components/elgato/test_config_flow.py +++ b/tests/components/elgato/test_config_flow.py @@ -10,7 +10,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components import zeroconf from homeassistant.components.elgato.const import DOMAIN from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF -from homeassistant.const import CONF_HOST, CONF_PORT, CONF_SOURCE +from homeassistant.const import CONF_HOST, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -33,7 +33,7 @@ async def test_full_user_flow_implementation( assert result.get("step_id") == "user" result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={CONF_HOST: "127.0.0.1", CONF_PORT: 9123} + result["flow_id"], user_input={CONF_HOST: "127.0.0.1"} ) assert result2.get("type") is FlowResultType.CREATE_ENTRY @@ -94,7 +94,7 @@ async def test_connection_error( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, - data={CONF_HOST: "127.0.0.1", CONF_PORT: 9123}, + data={CONF_HOST: "127.0.0.1"}, ) assert result.get("type") is FlowResultType.FORM @@ 
-135,7 +135,7 @@ async def test_user_device_exists_abort( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, - data={CONF_HOST: "127.0.0.1", CONF_PORT: 9123}, + data={CONF_HOST: "127.0.0.1"}, ) assert result.get("type") is FlowResultType.ABORT From 525614b7cda1440e94f8794a84d6f4fd5a6a410f Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Wed, 11 Dec 2024 20:52:20 +0100 Subject: [PATCH 085/677] Bump pylamarzocco to 1.4.0 (#132917) * Bump pylamarzocco to 1.4.0 * update device snapshot --- homeassistant/components/lamarzocco/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/lamarzocco/snapshots/test_diagnostics.ambr | 2 ++ 4 files changed, 5 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index 00e76096e7f..0d2111a2026 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -36,5 +36,5 @@ "integration_type": "device", "iot_class": "cloud_polling", "loggers": ["pylamarzocco"], - "requirements": ["pylamarzocco==1.3.3"] + "requirements": ["pylamarzocco==1.4.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index e039a6b486b..c6ab1e2dfae 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2030,7 +2030,7 @@ pykwb==0.0.8 pylacrosse==0.4 # homeassistant.components.lamarzocco -pylamarzocco==1.3.3 +pylamarzocco==1.4.0 # homeassistant.components.lastfm pylast==5.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f67bee3f32f..f9ed2bebf99 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1638,7 +1638,7 @@ pykrakenapi==0.1.8 pykulersky==0.5.2 # homeassistant.components.lamarzocco -pylamarzocco==1.3.3 +pylamarzocco==1.4.0 # homeassistant.components.lastfm pylast==5.1.0 diff --git a/tests/components/lamarzocco/snapshots/test_diagnostics.ambr 
b/tests/components/lamarzocco/snapshots/test_diagnostics.ambr index b185557bd08..b1d8140b2ce 100644 --- a/tests/components/lamarzocco/snapshots/test_diagnostics.ambr +++ b/tests/components/lamarzocco/snapshots/test_diagnostics.ambr @@ -3,6 +3,7 @@ dict({ 'config': dict({ 'backflush_enabled': False, + 'bbw_settings': None, 'boilers': dict({ 'CoffeeBoiler1': dict({ 'current_temperature': 96.5, @@ -44,6 +45,7 @@ }), }), 'prebrew_mode': 'TypeB', + 'scale': None, 'smart_standby': dict({ 'enabled': True, 'minutes': 10, From d43d84a67fa1a97ee7eb4bd60168ee81eceaaeb4 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Wed, 11 Dec 2024 15:07:29 -0500 Subject: [PATCH 086/677] Add parallel updates & use typed config entry for Russound RIO (#132958) --- homeassistant/components/russound_rio/__init__.py | 2 +- homeassistant/components/russound_rio/media_player.py | 2 ++ homeassistant/components/russound_rio/quality_scale.yaml | 6 ++---- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/russound_rio/__init__.py b/homeassistant/components/russound_rio/__init__.py index 784629ea0bc..b068fbd1892 100644 --- a/homeassistant/components/russound_rio/__init__.py +++ b/homeassistant/components/russound_rio/__init__.py @@ -58,7 +58,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: RussoundConfigEntry) -> return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: RussoundConfigEntry) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): await entry.runtime_data.disconnect() diff --git a/homeassistant/components/russound_rio/media_player.py b/homeassistant/components/russound_rio/media_player.py index 45818d3e25b..12b41485167 100644 --- a/homeassistant/components/russound_rio/media_player.py +++ 
b/homeassistant/components/russound_rio/media_player.py @@ -28,6 +28,8 @@ from .entity import RussoundBaseEntity, command _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + async def async_setup_platform( hass: HomeAssistant, diff --git a/homeassistant/components/russound_rio/quality_scale.yaml b/homeassistant/components/russound_rio/quality_scale.yaml index 603485705a3..4c7214cfd8b 100644 --- a/homeassistant/components/russound_rio/quality_scale.yaml +++ b/homeassistant/components/russound_rio/quality_scale.yaml @@ -26,9 +26,7 @@ rules: entity-event-setup: done entity-unique-id: done has-entity-name: done - runtime-data: - status: todo - comment: Can use RussoundConfigEntry in async_unload_entry + runtime-data: done test-before-configure: done test-before-setup: done unique-config-entry: done @@ -42,7 +40,7 @@ rules: status: exempt comment: | This integration does not require authentication. - parallel-updates: todo + parallel-updates: done test-coverage: todo integration-owner: done docs-installation-parameters: todo From a1e4b3b0af1191b02bad30f281960a31b53e949b Mon Sep 17 00:00:00 2001 From: G Johansson Date: Wed, 11 Dec 2024 21:23:26 +0100 Subject: [PATCH 087/677] Update quality scale for nordpool (#132964) * Update quality scale for nordpool * more --- .../components/nordpool/quality_scale.yaml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/nordpool/quality_scale.yaml b/homeassistant/components/nordpool/quality_scale.yaml index 2cb0b655b17..79d5ac0ecea 100644 --- a/homeassistant/components/nordpool/quality_scale.yaml +++ b/homeassistant/components/nordpool/quality_scale.yaml @@ -20,8 +20,8 @@ rules: This integration does not provide additional actions. 
common-modules: done docs-high-level-description: done - docs-installation-instructions: todo - docs-removal-instructions: todo + docs-installation-instructions: done + docs-removal-instructions: done docs-actions: status: exempt comment: | @@ -39,7 +39,7 @@ rules: status: exempt comment: | This integration does not require authentication. - parallel-updates: todo + parallel-updates: done test-coverage: done integration-owner: done docs-installation-parameters: done @@ -78,16 +78,16 @@ rules: status: exempt comment: | This integration doesn't have any cases where raising an issue is needed. - docs-use-cases: todo + docs-use-cases: done docs-supported-devices: status: exempt comment: | Only service, no device docs-supported-functions: done - docs-data-update: todo - docs-known-limitations: todo + docs-data-update: done + docs-known-limitations: done docs-troubleshooting: todo - docs-examples: todo + docs-examples: done # Platinum async-dependency: done From 8e991fc92fe095079f74c46b3bf1be897bd881ef Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 11 Dec 2024 21:49:34 +0100 Subject: [PATCH 088/677] Merge feature branch with backup changes to dev (#132954) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Reapply "Make WS command backup/generate send events" (#131530) This reverts commit 9b8316df3f78d136ae73c096168bd73ffebc4465. 
* MVP implementation of Backup sync agents (#126122) * init sync agent * add syncing * root import * rename list to info and add sync state * Add base backup class * Revert unneded change * adjust tests * move to kitchen_sink * split * move * Adjustments * Adjustment * update * Tests * Test unknown agent * adjust * Adjust for different test environments * Change /info WS to contain a dictinary * reorder * Add websocket command to trigger sync from the supervisor * cleanup * Make mypy happier --------- Co-authored-by: Erik * Make BackupSyncMetadata model a dataclass (#130555) Make backup BackupSyncMetadata model a dataclass * Rename backup sync agent to backup agent (#130575) * Rename sync agent module to agent * Rename BackupSyncAgent to BackupAgent * Fix test typo * Rename async_get_backup_sync_agents to async_get_backup_agents * Rename and clean up remaining sync things * Update kitchen sink * Apply suggestions from code review * Update test_manager.py --------- Co-authored-by: Erik Montnemery * Add additional options to WS command backup/generate (#130530) * Add additional options to WS command backup/generate * Improve test * Improve test * Align parameter names in backup/agents/* WS commands (#130590) * Allow setting password for backups (#110630) * Allow setting password for backups * use is_hassio from helpers * move it * Fix getting psw * Fix restoring with psw * Address review comments * Improve docstring * Adjust kitchen sink * Adjust --------- Co-authored-by: Erik * Export relevant names from backup integration (#130596) * Tweak backup agent interface (#130613) * Tweak backup agent interface * Adjust kitchen_sink * Test kitchen sink backup (#130609) * Test agents_list_backups * Test agents_info * Test agents_download * Export Backup from manager * Test agents_upload * Update tests after rebase * Use backup domain * Remove WS command backup/upload (#130588) * Remove WS command backup/upload * Disable failing kitchen_sink test * Make local backup a backup 
agent (#130623) * Make local backup a backup agent * Adjust * Adjust * Adjust * Adjust tests * Adjust * Adjust * Adjust docstring * Adjust * Protect members of CoreLocalBackupAgent * Remove redundant check for file * Make the backup.create service use the first local agent * Add BackupAgent.async_get_backup * Fix some TODOs * Add support for downloading backup from a remote agent * Fix restore * Fix test * Adjust kitchen_sink test * Remove unused method BackupManager.async_get_backup_path * Re-enable kitchen sink test * Remove BaseBackupManager.async_upload_backup * Support restore from remote agent * Fix review comments * Include backup agent error in response to WS command backup/info (#130884) * Adjust code related to WS command backup/info (#130890) * Include backup agent error in response to WS command backup/details (#130892) * Remove LOCAL_AGENT_ID constant from backup manager (#130895) * Add backup config storage (#130871) * Add base for backup config * Allow updating backup config * Test loading backup config * Add backup config update method * Add temporary check for BackupAgent.async_remove_backup (#130893) * Rename backup slug to backup_id (#130902) * Improve backup websocket API tests (#130912) * Improve backup websocket API tests * Add missing snapshot * Fix tests leaving files behind * Improve backup manager backup creation tests (#130916) * Remove class backup.backup.LocalBackup (#130919) * Add agent delete backup (#130921) * Add backup agent delete backup * Remove agents delete websocket command * Update docstring Co-authored-by: Erik Montnemery --------- Co-authored-by: Erik Montnemery * Disable core local backup agent in hassio (#130933) * Rename remove backup to delete backup (#130940) * Rename remove backup to delete backup * Revert "backup/delete" * Refactor BackupManager (#130947) * Refactor BackupManager * Adjust * Adjust backup creation * Copy in executor * Fix BackupManager.async_get_backup (#130975) * Fix typo in backup tests (#130978) * 
Adjust backup NewBackup class (#130976) * Remove class backup.BackupUploadMetadata (#130977) Remove class backup.BackupMetadata * Report backup size in bytes instead of MB (#131028) Co-authored-by: Robert Resch * Speed up CI for feature branch (#131030) * Speed up CI for feature branch * adjust * fix * fix * fix * fix * Rename remove to delete in backup websocket type (#131023) * Revert "Speed up CI for feature branch" (#131074) Revert "Speed up CI for feature branch (#131030)" This reverts commit 791280506d1859b1a722f5064d75bcbe48acc1c3. * Rename class BaseBackup to AgentBackup (#131083) * Rename class BaseBackup to AgentBackup * Update tests * Speed up CI for backup feature branch (#131079) * Add backup platform to the hassio integration (#130991) * Add backup platform to the hassio integration * Add hassio to after_dependencies of backup * Address review comments * Remove redundant hassio parametrization of tests * Add tests * Address review comments * Bump CI cache version * Revert "Bump CI cache version" This reverts commit 2ab4d2b1795c953ccfc9b17c47f9df3faac83749. 
* Extend backup info class AgentBackup (#131110) * Extend backup info class AgentBackup * Update kitchen sink * Update kitchen sink test * Update kitchen sink test * Exclude cloud and hassio from core files (#131117) * Remove unnecessary **kwargs from backup API (#131124) * Fix backup tests (#131128) * Freeze backup dataclasses (#131122) * Protect CoreLocalBackupAgent.load_backups (#131126) * Use backup metadata v2 in core/container backups (#131125) * Extend backup creation API (#131121) * Extend backup creation API * Add tests * Fix merge * Fix merge * Return agent errors when deleting a backup (#131142) * Return agent errors when deleting a backup * Remove redundant calls to dict.keys() * Add enum type for backup folder (#131158) * Add method AgentBackup.from_dict (#131164) * Remove WS command backup/agents/list_backups (#131163) * Handle backup schedule (#131127) * Add backup schedule handling * Fix unrelated incorrect type annotation in test * Clarify delay save * Make the backup time compatible with the recorder nightly job * Update create backup parameters * Use typed dict for create backup parameters * Simplify schedule state * Group create backup parameters * Move parameter * Fix typo * Use Folder model * Handle deserialization of folders better * Fail on attempt to include addons or folders in core backup (#131204) * Fix AgentBackup test (#131201) * Add options to WS command backup/restore (#131194) * Add options to WS command backup/restore * Add tests * Fix test * Teach core backup to restore only database or only settings (#131225) * Exclude tmp_backups/*.tar from backups (#131243) * Add WS command backup/subscribe_events (#131250) * Clean up temporary directory after restoring backup (#131263) * Improve hassio backup agent list (#131268) * Include `last_automatic_backup` in reply to backup/info (#131293) Include last_automatic_backup in reply to backup/info * Handle backup delete after config (#131259) * Handle delete after copies * Handle delete 
after days * Add some test examples * Test config_delete_after_logic * Test config_delete_after_copies_logic * Test more delete after days * Add debug logs * Always delete the oldest backup first * Never remove the last backup * Clean up words Co-authored-by: Erik Montnemery * Fix after cleaning words * Use utcnow * Remove duplicate guard * Simplify sorting * Delete backups even if there are agent errors on get backups --------- Co-authored-by: Erik Montnemery * Rename backup delete after to backup retention (#131364) * Rename backup delete after to backup retention * Tweak * Remove length limit on `agent_ids` when configuring backup (#132057) Remove length limit on agent_ids when configuring backup * Rename backup retention_config to retention (#132068) * Modify backup agent API to be stream oriented (#132090) * Modify backup agent API to be stream oriented * Fix tests * Adjust after code review * Remove no longer needed pylint override * Improve test coverage * Change BackupAgent API to work with AsyncIterator objects * Don't close files in the event loop * Don't close files in the event loop * Fix backup manager create backup log (#132174) * Fix debug log level (#132186) * Add cloud backup agent (#129621) * Init cloud backup sync * Add more metadata * Fix typo * Adjust to base changes * Don't raise on list if more than one backup is available * Adjust to base branch * Fetch always and verify on download * Update homeassistant/components/cloud/backup.py Co-authored-by: Martin Hjelmare * Adjust to base branch changes * Not required anymore * Workaround * Fix blocking event loop * Fix * Add some tests * some tests * Add cloud backup delete functionality * Enable check * Fix ruff * Use fixture * Use iter_chunks instead * Remove read * Remove explicit export of read_backup * Align with BackupAgent API changes * Improve test coverage * Improve error handling * Adjust docstrings * Catch aiohttp.ClientError bubbling up from hass_nabucasa * Improve iteration --------- 
Co-authored-by: Erik Co-authored-by: Robert Resch Co-authored-by: Martin Hjelmare Co-authored-by: Krisjanis Lejejs * Extract file receiver from `BackupManager.async_receive_backup` to util (#132271) * Extract file receiver from BackupManager.async_receive_backup to util * Apply suggestions from code review Co-authored-by: Martin Hjelmare --------- Co-authored-by: Martin Hjelmare * Make sure backup directory exists (#132269) * Make sure backup directory exists * Hand off directory creation to executor * Use mkdir's exist_ok feeature * Organize BackupManager instance attributes (#132277) * Don't store received backups in a TempDir (#132272) * Don't store received backups in a TempDir * Fix tests * Make sure backup directory exists * Address review comments * Fix tests * Rewrite backup manager state handling (#132375) * Rewrite backup manager state handling * Address review comments * Modify backup reader/writer API to be stream oriented (#132464) * Internalize backup tasks (#132482) * Internalize backup tasks * Update test after rebase * Handle backup error during automatic backup (#132511) * Improve backup manager state logging (#132549) * Fix backup manager state when restore completes (#132548) * Remove WS command backup/agents/download (#132664) * Add WS command backup/generate_with_stored_settings (#132671) * Add WS command backup/generate_with_stored_settings * Register the new command, add tests * Refactor local agent backup tests (#132683) * Refactor test_load_backups * Refactor test loading agents * Refactor test_delete_backup * Refactor test_upload * Clean up duplicate tests * Refactor backup manager receive tests (#132701) * Refactor backup manager receive tests * Clean up * Refactor pre and post platform tests (#132708) * Refactor backup pre platform test * Refactor backup post platform test * Bump aiohasupervisor to version 0.2.2b0 (#132704) * Bump aiohasupervisor to version 0.2.2b0 * Adjust tests * Publish event when manager is idle after creating 
backup (#132724) * Handle busy backup manager when uploading backup (#132736) * Adjust hassio backup agent to supervisor changes (#132732) * Adjust hassio backup agent to supervisor changes * Fix typo * Refactor test for create backup with wrong parameters (#132763) * Refactor test not loading bad backup platforms (#132769) * Improve receive backup coverage (#132758) * Refactor initiate backup test (#132829) * Rename Backup to ManagerBackup (#132841) * Refactor backup config (#132845) * Refactor backup config * Remove unnecessary condition * Adjust tests * Improve initiate backup test (#132858) * Store the time of automatic backup attempts (#132860) * Store the time of automatic backup attempts * Address review comments * Update test * Update cloud test * Save agent failures when creating backups (#132850) * Save agent failures when creating backups * Update tests * Store KnownBackups * Add test * Only clear known_backups on no error, add tests * Address review comments * Store known backups as a list * Update tests * Track all backups created with backup strategy settings (#132916) * Track all backups created with saved settings * Rename * Add explicit call to save the store * Don't register service backup.create in HassOS installations (#132932) * Revert changes to action service backup.create (#132938) * Fix logic for cleaning up temporary backup file (#132934) * Fix logic for cleaning up temporary backup file * Reduce scope of patch * Fix with_strategy_settings info not sent over websocket (#132939) * Fix with_strategy_settings info not sent over websocket * Fix kitchen sink tests * Fix cloud and hassio tests * Revert backup ci changes (#132955) Revert changes speeding up CI * Fix revert of CI changes (#132960) --------- Co-authored-by: Joakim Sørensen Co-authored-by: Martin Hjelmare Co-authored-by: Robert Resch Co-authored-by: Paul Bottein Co-authored-by: Krisjanis Lejejs --- homeassistant/backup_restore.py | 101 +- homeassistant/components/backup/__init__.py 
| 75 +- homeassistant/components/backup/agent.py | 100 + homeassistant/components/backup/backup.py | 124 + homeassistant/components/backup/config.py | 444 +++ homeassistant/components/backup/const.py | 7 + homeassistant/components/backup/http.py | 55 +- homeassistant/components/backup/manager.py | 1262 ++++++-- homeassistant/components/backup/manifest.json | 3 +- homeassistant/components/backup/models.py | 61 + homeassistant/components/backup/store.py | 52 + homeassistant/components/backup/util.py | 111 + homeassistant/components/backup/websocket.py | 220 +- homeassistant/components/cloud/backup.py | 196 ++ homeassistant/components/cloud/manifest.json | 7 +- homeassistant/components/hassio/backup.py | 365 +++ homeassistant/components/hassio/manifest.json | 2 +- .../components/kitchen_sink/backup.py | 92 + homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- requirements_all.txt | 3 +- requirements_test_all.txt | 3 +- tests/components/backup/common.py | 153 +- tests/components/backup/conftest.py | 97 + .../backup/snapshots/test_backup.ambr | 206 ++ .../backup/snapshots/test_websocket.ambr | 2748 ++++++++++++++++- tests/components/backup/test_backup.py | 129 + tests/components/backup/test_http.py | 42 +- tests/components/backup/test_init.py | 22 +- tests/components/backup/test_manager.py | 1074 +++++-- tests/components/backup/test_models.py | 11 + tests/components/backup/test_websocket.py | 1600 +++++++++- tests/components/cloud/test_backup.py | 568 ++++ tests/components/conftest.py | 4 + tests/components/hassio/test_backup.py | 403 +++ tests/components/kitchen_sink/test_backup.py | 194 ++ tests/test_backup_restore.py | 210 +- 38 files changed, 9977 insertions(+), 773 deletions(-) create mode 100644 homeassistant/components/backup/agent.py create mode 100644 homeassistant/components/backup/backup.py create mode 100644 homeassistant/components/backup/config.py create mode 100644 homeassistant/components/backup/models.py create 
mode 100644 homeassistant/components/backup/store.py create mode 100644 homeassistant/components/backup/util.py create mode 100644 homeassistant/components/cloud/backup.py create mode 100644 homeassistant/components/hassio/backup.py create mode 100644 homeassistant/components/kitchen_sink/backup.py create mode 100644 tests/components/backup/conftest.py create mode 100644 tests/components/backup/snapshots/test_backup.ambr create mode 100644 tests/components/backup/test_backup.py create mode 100644 tests/components/backup/test_models.py create mode 100644 tests/components/cloud/test_backup.py create mode 100644 tests/components/hassio/test_backup.py create mode 100644 tests/components/kitchen_sink/test_backup.py diff --git a/homeassistant/backup_restore.py b/homeassistant/backup_restore.py index 32991dfb2d3..f9250e3129e 100644 --- a/homeassistant/backup_restore.py +++ b/homeassistant/backup_restore.py @@ -1,6 +1,10 @@ """Home Assistant module to handle restoring backups.""" +from __future__ import annotations + +from collections.abc import Iterable from dataclasses import dataclass +import hashlib import json import logging from pathlib import Path @@ -14,7 +18,12 @@ import securetar from .const import __version__ as HA_VERSION RESTORE_BACKUP_FILE = ".HA_RESTORE" -KEEP_PATHS = ("backups",) +KEEP_BACKUPS = ("backups",) +KEEP_DATABASE = ( + "home-assistant_v2.db", + "home-assistant_v2.db-wal", +) + _LOGGER = logging.getLogger(__name__) @@ -24,6 +33,21 @@ class RestoreBackupFileContent: """Definition for restore backup file content.""" backup_file_path: Path + password: str | None + remove_after_restore: bool + restore_database: bool + restore_homeassistant: bool + + +def password_to_key(password: str) -> bytes: + """Generate a AES Key from password. + + Matches the implementation in supervisor.backups.utils.password_to_key. 
+ """ + key: bytes = password.encode() + for _ in range(100): + key = hashlib.sha256(key).digest() + return key[:16] def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | None: @@ -32,20 +56,24 @@ def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | try: instruction_content = json.loads(instruction_path.read_text(encoding="utf-8")) return RestoreBackupFileContent( - backup_file_path=Path(instruction_content["path"]) + backup_file_path=Path(instruction_content["path"]), + password=instruction_content["password"], + remove_after_restore=instruction_content["remove_after_restore"], + restore_database=instruction_content["restore_database"], + restore_homeassistant=instruction_content["restore_homeassistant"], ) - except (FileNotFoundError, json.JSONDecodeError): + except (FileNotFoundError, KeyError, json.JSONDecodeError): return None -def _clear_configuration_directory(config_dir: Path) -> None: - """Delete all files and directories in the config directory except for the backups directory.""" - keep_paths = [config_dir.joinpath(path) for path in KEEP_PATHS] - config_contents = sorted( - [entry for entry in config_dir.iterdir() if entry not in keep_paths] +def _clear_configuration_directory(config_dir: Path, keep: Iterable[str]) -> None: + """Delete all files and directories in the config directory except entries in the keep list.""" + keep_paths = [config_dir.joinpath(path) for path in keep] + entries_to_remove = sorted( + entry for entry in config_dir.iterdir() if entry not in keep_paths ) - for entry in config_contents: + for entry in entries_to_remove: entrypath = config_dir.joinpath(entry) if entrypath.is_file(): @@ -54,12 +82,15 @@ def _clear_configuration_directory(config_dir: Path) -> None: shutil.rmtree(entrypath) -def _extract_backup(config_dir: Path, backup_file_path: Path) -> None: +def _extract_backup( + config_dir: Path, + restore_content: RestoreBackupFileContent, +) -> None: """Extract the backup 
file to the config directory.""" with ( TemporaryDirectory() as tempdir, securetar.SecureTarFile( - backup_file_path, + restore_content.backup_file_path, gzip=False, mode="r", ) as ostf, @@ -88,22 +119,41 @@ def _extract_backup(config_dir: Path, backup_file_path: Path) -> None: f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}", ), gzip=backup_meta["compressed"], + key=password_to_key(restore_content.password) + if restore_content.password is not None + else None, mode="r", ) as istf: - for member in istf.getmembers(): - if member.name == "data": - continue - member.name = member.name.replace("data/", "") - _clear_configuration_directory(config_dir) istf.extractall( - path=config_dir, - members=[ - member - for member in securetar.secure_path(istf) - if member.name != "data" - ], + path=Path(tempdir, "homeassistant"), + members=securetar.secure_path(istf), filter="fully_trusted", ) + if restore_content.restore_homeassistant: + keep = list(KEEP_BACKUPS) + if not restore_content.restore_database: + keep.extend(KEEP_DATABASE) + _clear_configuration_directory(config_dir, keep) + shutil.copytree( + Path(tempdir, "homeassistant", "data"), + config_dir, + dirs_exist_ok=True, + ignore=shutil.ignore_patterns(*(keep)), + ) + elif restore_content.restore_database: + for entry in KEEP_DATABASE: + entrypath = config_dir / entry + + if entrypath.is_file(): + entrypath.unlink() + elif entrypath.is_dir(): + shutil.rmtree(entrypath) + + for entry in KEEP_DATABASE: + shutil.copy( + Path(tempdir, "homeassistant", "data", entry), + config_dir, + ) def restore_backup(config_dir_path: str) -> bool: @@ -119,8 +169,13 @@ def restore_backup(config_dir_path: str) -> bool: backup_file_path = restore_content.backup_file_path _LOGGER.info("Restoring %s", backup_file_path) try: - _extract_backup(config_dir, backup_file_path) + _extract_backup( + config_dir=config_dir, + restore_content=restore_content, + ) except FileNotFoundError as err: raise ValueError(f"Backup file 
{backup_file_path} does not exist") from err + if restore_content.remove_after_restore: + backup_file_path.unlink(missing_ok=True) _LOGGER.info("Restore complete, restarting") return True diff --git a/homeassistant/components/backup/__init__.py b/homeassistant/components/backup/__init__.py index 200cb4a3f65..f1a6f3be196 100644 --- a/homeassistant/components/backup/__init__.py +++ b/homeassistant/components/backup/__init__.py @@ -5,36 +5,81 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.typing import ConfigType -from .const import DATA_MANAGER, DOMAIN, LOGGER +from .agent import ( + BackupAgent, + BackupAgentError, + BackupAgentPlatformProtocol, + LocalBackupAgent, +) +from .const import DATA_MANAGER, DOMAIN from .http import async_register_http_views -from .manager import BackupManager +from .manager import ( + BackupManager, + BackupPlatformProtocol, + BackupReaderWriter, + CoreBackupReaderWriter, + CreateBackupEvent, + ManagerBackup, + NewBackup, + WrittenBackup, +) +from .models import AddonInfo, AgentBackup, Folder from .websocket import async_register_websocket_handlers +__all__ = [ + "AddonInfo", + "AgentBackup", + "ManagerBackup", + "BackupAgent", + "BackupAgentError", + "BackupAgentPlatformProtocol", + "BackupPlatformProtocol", + "BackupReaderWriter", + "CreateBackupEvent", + "Folder", + "LocalBackupAgent", + "NewBackup", + "WrittenBackup", +] + CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Backup integration.""" - backup_manager = BackupManager(hass) - hass.data[DATA_MANAGER] = backup_manager - with_hassio = is_hassio(hass) + reader_writer: BackupReaderWriter + if not with_hassio: + reader_writer = CoreBackupReaderWriter(hass) + else: + # pylint: disable-next=import-outside-toplevel, hass-component-root-import + from homeassistant.components.hassio.backup import 
SupervisorBackupReaderWriter + + reader_writer = SupervisorBackupReaderWriter(hass) + + backup_manager = BackupManager(hass, reader_writer) + hass.data[DATA_MANAGER] = backup_manager + await backup_manager.async_setup() + async_register_websocket_handlers(hass, with_hassio) - if with_hassio: - if DOMAIN in config: - LOGGER.error( - "The backup integration is not supported on this installation method, " - "please remove it from your configuration" - ) - return True - async def async_handle_create_service(call: ServiceCall) -> None: """Service handler for creating backups.""" - await backup_manager.async_create_backup() + agent_id = list(backup_manager.local_backup_agents)[0] + await backup_manager.async_create_backup( + agent_ids=[agent_id], + include_addons=None, + include_all_addons=False, + include_database=True, + include_folders=None, + include_homeassistant=True, + name=None, + password=None, + ) - hass.services.async_register(DOMAIN, "create", async_handle_create_service) + if not with_hassio: + hass.services.async_register(DOMAIN, "create", async_handle_create_service) async_register_http_views(hass) diff --git a/homeassistant/components/backup/agent.py b/homeassistant/components/backup/agent.py new file mode 100644 index 00000000000..36f2e7ee34e --- /dev/null +++ b/homeassistant/components/backup/agent.py @@ -0,0 +1,100 @@ +"""Backup agents for the Backup integration.""" + +from __future__ import annotations + +import abc +from collections.abc import AsyncIterator, Callable, Coroutine +from pathlib import Path +from typing import Any, Protocol + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from .models import AgentBackup + + +class BackupAgentError(HomeAssistantError): + """Base class for backup agent errors.""" + + +class BackupAgentUnreachableError(BackupAgentError): + """Raised when the agent can't reach its API.""" + + _message = "The backup agent is unreachable." 
+ + +class BackupAgent(abc.ABC): + """Backup agent interface.""" + + name: str + + @abc.abstractmethod + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file. + + :param backup_id: The ID of the backup that was returned in async_list_backups. + :return: An async iterator that yields bytes. + """ + + @abc.abstractmethod + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup. + + :param open_stream: A function returning an async iterator that yields bytes. + :param backup: Metadata about the backup that should be uploaded. + """ + + @abc.abstractmethod + async def async_delete_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> None: + """Delete a backup file. + + :param backup_id: The ID of the backup that was returned in async_list_backups. + """ + + @abc.abstractmethod + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups.""" + + @abc.abstractmethod + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup | None: + """Return a backup.""" + + +class LocalBackupAgent(BackupAgent): + """Local backup agent.""" + + @abc.abstractmethod + def get_backup_path(self, backup_id: str) -> Path: + """Return the local path to a backup. + + The method should return the path to the backup file with the specified id. 
+ """ + + +class BackupAgentPlatformProtocol(Protocol): + """Define the format of backup platforms which implement backup agents.""" + + async def async_get_backup_agents( + self, + hass: HomeAssistant, + **kwargs: Any, + ) -> list[BackupAgent]: + """Return a list of backup agents.""" diff --git a/homeassistant/components/backup/backup.py b/homeassistant/components/backup/backup.py new file mode 100644 index 00000000000..b9aad89c7f3 --- /dev/null +++ b/homeassistant/components/backup/backup.py @@ -0,0 +1,124 @@ +"""Local backup support for Core and Container installations.""" + +from __future__ import annotations + +from collections.abc import AsyncIterator, Callable, Coroutine +import json +from pathlib import Path +from tarfile import TarError +from typing import Any + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.hassio import is_hassio + +from .agent import BackupAgent, LocalBackupAgent +from .const import LOGGER +from .models import AgentBackup +from .util import read_backup + + +async def async_get_backup_agents( + hass: HomeAssistant, + **kwargs: Any, +) -> list[BackupAgent]: + """Return the local backup agent.""" + if is_hassio(hass): + return [] + return [CoreLocalBackupAgent(hass)] + + +class CoreLocalBackupAgent(LocalBackupAgent): + """Local backup agent for Core and Container installations.""" + + name = "local" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize the backup agent.""" + super().__init__() + self._hass = hass + self._backup_dir = Path(hass.config.path("backups")) + self._backups: dict[str, AgentBackup] = {} + self._loaded_backups = False + + async def _load_backups(self) -> None: + """Load data of stored backup files.""" + backups = await self._hass.async_add_executor_job(self._read_backups) + LOGGER.debug("Loaded %s local backups", len(backups)) + self._backups = backups + self._loaded_backups = True + + def _read_backups(self) -> dict[str, AgentBackup]: + """Read backups from disk.""" + 
backups: dict[str, AgentBackup] = {} + for backup_path in self._backup_dir.glob("*.tar"): + try: + backup = read_backup(backup_path) + backups[backup.backup_id] = backup + except (OSError, TarError, json.JSONDecodeError, KeyError) as err: + LOGGER.warning("Unable to read backup %s: %s", backup_path, err) + return backups + + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file.""" + raise NotImplementedError + + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup.""" + self._backups[backup.backup_id] = backup + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups.""" + if not self._loaded_backups: + await self._load_backups() + return list(self._backups.values()) + + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup | None: + """Return a backup.""" + if not self._loaded_backups: + await self._load_backups() + + if not (backup := self._backups.get(backup_id)): + return None + + backup_path = self.get_backup_path(backup_id) + if not await self._hass.async_add_executor_job(backup_path.exists): + LOGGER.debug( + ( + "Removing tracked backup (%s) that does not exists on the expected" + " path %s" + ), + backup.backup_id, + backup_path, + ) + self._backups.pop(backup_id) + return None + + return backup + + def get_backup_path(self, backup_id: str) -> Path: + """Return the local path to a backup.""" + return self._backup_dir / f"{backup_id}.tar" + + async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None: + """Delete a backup file.""" + if await self.async_get_backup(backup_id) is None: + return + + backup_path = self.get_backup_path(backup_id) + await self._hass.async_add_executor_job(backup_path.unlink, True) + LOGGER.debug("Deleted backup located at %s", 
backup_path) + self._backups.pop(backup_id) diff --git a/homeassistant/components/backup/config.py b/homeassistant/components/backup/config.py new file mode 100644 index 00000000000..6304d0aa90b --- /dev/null +++ b/homeassistant/components/backup/config.py @@ -0,0 +1,444 @@ +"""Provide persistent configuration for the backup integration.""" + +from __future__ import annotations + +import asyncio +from collections.abc import Callable +from dataclasses import dataclass, field, replace +from datetime import datetime, timedelta +from enum import StrEnum +from typing import TYPE_CHECKING, Self, TypedDict + +from cronsim import CronSim + +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.event import async_call_later, async_track_point_in_time +from homeassistant.helpers.typing import UNDEFINED, UndefinedType +from homeassistant.util import dt as dt_util + +from .const import LOGGER +from .models import Folder + +if TYPE_CHECKING: + from .manager import BackupManager, ManagerBackup + +# The time of the automatic backup event should be compatible with +# the time of the recorder's nightly job which runs at 04:12. +# Run the backup at 04:45. 
+CRON_PATTERN_DAILY = "45 4 * * *" +CRON_PATTERN_WEEKLY = "45 4 * * {}" + + +class StoredBackupConfig(TypedDict): + """Represent the stored backup config.""" + + create_backup: StoredCreateBackupConfig + last_attempted_strategy_backup: datetime | None + last_completed_strategy_backup: datetime | None + retention: StoredRetentionConfig + schedule: StoredBackupSchedule + + +@dataclass(kw_only=True) +class BackupConfigData: + """Represent loaded backup config data.""" + + create_backup: CreateBackupConfig + last_attempted_strategy_backup: datetime | None = None + last_completed_strategy_backup: datetime | None = None + retention: RetentionConfig + schedule: BackupSchedule + + @classmethod + def from_dict(cls, data: StoredBackupConfig) -> Self: + """Initialize backup config data from a dict.""" + include_folders_data = data["create_backup"]["include_folders"] + if include_folders_data: + include_folders = [Folder(folder) for folder in include_folders_data] + else: + include_folders = None + retention = data["retention"] + + return cls( + create_backup=CreateBackupConfig( + agent_ids=data["create_backup"]["agent_ids"], + include_addons=data["create_backup"]["include_addons"], + include_all_addons=data["create_backup"]["include_all_addons"], + include_database=data["create_backup"]["include_database"], + include_folders=include_folders, + name=data["create_backup"]["name"], + password=data["create_backup"]["password"], + ), + last_attempted_strategy_backup=data["last_attempted_strategy_backup"], + last_completed_strategy_backup=data["last_completed_strategy_backup"], + retention=RetentionConfig( + copies=retention["copies"], + days=retention["days"], + ), + schedule=BackupSchedule(state=ScheduleState(data["schedule"]["state"])), + ) + + def to_dict(self) -> StoredBackupConfig: + """Convert backup config data to a dict.""" + return StoredBackupConfig( + create_backup=self.create_backup.to_dict(), + last_attempted_strategy_backup=self.last_attempted_strategy_backup, + 
last_completed_strategy_backup=self.last_completed_strategy_backup, + retention=self.retention.to_dict(), + schedule=self.schedule.to_dict(), + ) + + +class BackupConfig: + """Handle backup config.""" + + def __init__(self, hass: HomeAssistant, manager: BackupManager) -> None: + """Initialize backup config.""" + self.data = BackupConfigData( + create_backup=CreateBackupConfig(), + retention=RetentionConfig(), + schedule=BackupSchedule(), + ) + self._manager = manager + + def load(self, stored_config: StoredBackupConfig) -> None: + """Load config.""" + self.data = BackupConfigData.from_dict(stored_config) + self.data.schedule.apply(self._manager) + + async def update( + self, + *, + create_backup: CreateBackupParametersDict | UndefinedType = UNDEFINED, + retention: RetentionParametersDict | UndefinedType = UNDEFINED, + schedule: ScheduleState | UndefinedType = UNDEFINED, + ) -> None: + """Update config.""" + if create_backup is not UNDEFINED: + self.data.create_backup = replace(self.data.create_backup, **create_backup) + if retention is not UNDEFINED: + new_retention = RetentionConfig(**retention) + if new_retention != self.data.retention: + self.data.retention = new_retention + self.data.retention.apply(self._manager) + if schedule is not UNDEFINED: + new_schedule = BackupSchedule(state=schedule) + if new_schedule.to_dict() != self.data.schedule.to_dict(): + self.data.schedule = new_schedule + self.data.schedule.apply(self._manager) + + self._manager.store.save() + + +@dataclass(kw_only=True) +class RetentionConfig: + """Represent the backup retention configuration.""" + + copies: int | None = None + days: int | None = None + + def apply(self, manager: BackupManager) -> None: + """Apply backup retention configuration.""" + if self.days is not None: + self._schedule_next(manager) + else: + self._unschedule_next(manager) + + def to_dict(self) -> StoredRetentionConfig: + """Convert backup retention configuration to a dict.""" + return StoredRetentionConfig( + 
copies=self.copies, + days=self.days, + ) + + @callback + def _schedule_next( + self, + manager: BackupManager, + ) -> None: + """Schedule the next delete after days.""" + self._unschedule_next(manager) + + async def _delete_backups(now: datetime) -> None: + """Delete backups older than days.""" + self._schedule_next(manager) + + def _backups_filter( + backups: dict[str, ManagerBackup], + ) -> dict[str, ManagerBackup]: + """Return backups older than days to delete.""" + # we need to check here since we await before + # this filter is applied + if self.days is None: + return {} + now = dt_util.utcnow() + return { + backup_id: backup + for backup_id, backup in backups.items() + if dt_util.parse_datetime(backup.date, raise_on_error=True) + + timedelta(days=self.days) + < now + } + + await _delete_filtered_backups(manager, _backups_filter) + + manager.remove_next_delete_event = async_call_later( + manager.hass, timedelta(days=1), _delete_backups + ) + + @callback + def _unschedule_next(self, manager: BackupManager) -> None: + """Unschedule the next delete after days.""" + if (remove_next_event := manager.remove_next_delete_event) is not None: + remove_next_event() + manager.remove_next_delete_event = None + + +class StoredRetentionConfig(TypedDict): + """Represent the stored backup retention configuration.""" + + copies: int | None + days: int | None + + +class RetentionParametersDict(TypedDict, total=False): + """Represent the parameters for retention.""" + + copies: int | None + days: int | None + + +class StoredBackupSchedule(TypedDict): + """Represent the stored backup schedule configuration.""" + + state: ScheduleState + + +class ScheduleState(StrEnum): + """Represent the schedule state.""" + + NEVER = "never" + DAILY = "daily" + MONDAY = "mon" + TUESDAY = "tue" + WEDNESDAY = "wed" + THURSDAY = "thu" + FRIDAY = "fri" + SATURDAY = "sat" + SUNDAY = "sun" + + +@dataclass(kw_only=True) +class BackupSchedule: + """Represent the backup schedule.""" + + state: 
ScheduleState = ScheduleState.NEVER + cron_event: CronSim | None = field(init=False, default=None) + + @callback + def apply( + self, + manager: BackupManager, + ) -> None: + """Apply a new schedule. + + There are only three possible state types: never, daily, or weekly. + """ + if self.state is ScheduleState.NEVER: + self._unschedule_next(manager) + return + + if self.state is ScheduleState.DAILY: + self._schedule_next(CRON_PATTERN_DAILY, manager) + else: + self._schedule_next( + CRON_PATTERN_WEEKLY.format(self.state.value), + manager, + ) + + @callback + def _schedule_next( + self, + cron_pattern: str, + manager: BackupManager, + ) -> None: + """Schedule the next backup.""" + self._unschedule_next(manager) + now = dt_util.now() + if (cron_event := self.cron_event) is None: + seed_time = manager.config.data.last_completed_strategy_backup or now + cron_event = self.cron_event = CronSim(cron_pattern, seed_time) + next_time = next(cron_event) + + if next_time < now: + # schedule a backup at next daily time once + # if we missed the last scheduled backup + cron_event = CronSim(CRON_PATTERN_DAILY, now) + next_time = next(cron_event) + # reseed the cron event attribute + # add a day to the next time to avoid scheduling at the same time again + self.cron_event = CronSim(cron_pattern, now + timedelta(days=1)) + + async def _create_backup(now: datetime) -> None: + """Create backup.""" + manager.remove_next_backup_event = None + config_data = manager.config.data + self._schedule_next(cron_pattern, manager) + + # create the backup + try: + await manager.async_create_backup( + agent_ids=config_data.create_backup.agent_ids, + include_addons=config_data.create_backup.include_addons, + include_all_addons=config_data.create_backup.include_all_addons, + include_database=config_data.create_backup.include_database, + include_folders=config_data.create_backup.include_folders, + include_homeassistant=True, # always include HA + name=config_data.create_backup.name, + 
password=config_data.create_backup.password, + with_strategy_settings=True, + ) + except Exception: # noqa: BLE001 + # another more specific exception will be added + # and handled in the future + LOGGER.exception("Unexpected error creating automatic backup") + + # delete old backups more numerous than copies + + def _backups_filter( + backups: dict[str, ManagerBackup], + ) -> dict[str, ManagerBackup]: + """Return oldest backups more numerous than copies to delete.""" + # we need to check here since we await before + # this filter is applied + if config_data.retention.copies is None: + return {} + return dict( + sorted( + backups.items(), + key=lambda backup_item: backup_item[1].date, + )[: len(backups) - config_data.retention.copies] + ) + + await _delete_filtered_backups(manager, _backups_filter) + + manager.remove_next_backup_event = async_track_point_in_time( + manager.hass, _create_backup, next_time + ) + + def to_dict(self) -> StoredBackupSchedule: + """Convert backup schedule to a dict.""" + return StoredBackupSchedule(state=self.state) + + @callback + def _unschedule_next(self, manager: BackupManager) -> None: + """Unschedule the next backup.""" + if (remove_next_event := manager.remove_next_backup_event) is not None: + remove_next_event() + manager.remove_next_backup_event = None + + +@dataclass(kw_only=True) +class CreateBackupConfig: + """Represent the config for async_create_backup.""" + + agent_ids: list[str] = field(default_factory=list) + include_addons: list[str] | None = None + include_all_addons: bool = False + include_database: bool = True + include_folders: list[Folder] | None = None + name: str | None = None + password: str | None = None + + def to_dict(self) -> StoredCreateBackupConfig: + """Convert create backup config to a dict.""" + return { + "agent_ids": self.agent_ids, + "include_addons": self.include_addons, + "include_all_addons": self.include_all_addons, + "include_database": self.include_database, + "include_folders": 
self.include_folders, + "name": self.name, + "password": self.password, + } + + +class StoredCreateBackupConfig(TypedDict): + """Represent the stored config for async_create_backup.""" + + agent_ids: list[str] + include_addons: list[str] | None + include_all_addons: bool + include_database: bool + include_folders: list[Folder] | None + name: str | None + password: str | None + + +class CreateBackupParametersDict(TypedDict, total=False): + """Represent the parameters for async_create_backup.""" + + agent_ids: list[str] + include_addons: list[str] | None + include_all_addons: bool + include_database: bool + include_folders: list[Folder] | None + name: str | None + password: str | None + + +async def _delete_filtered_backups( + manager: BackupManager, + backup_filter: Callable[[dict[str, ManagerBackup]], dict[str, ManagerBackup]], +) -> None: + """Delete backups parsed with a filter. + + :param manager: The backup manager. + :param backup_filter: A filter that should return the backups to delete. + """ + backups, get_agent_errors = await manager.async_get_backups() + if get_agent_errors: + LOGGER.debug( + "Error getting backups; continuing anyway: %s", + get_agent_errors, + ) + + LOGGER.debug("Total backups: %s", backups) + + filtered_backups = backup_filter(backups) + + if not filtered_backups: + return + + # always delete oldest backup first + filtered_backups = dict( + sorted( + filtered_backups.items(), + key=lambda backup_item: backup_item[1].date, + ) + ) + + if len(filtered_backups) >= len(backups): + # Never delete the last backup. 
+ last_backup = filtered_backups.popitem() + LOGGER.debug("Keeping the last backup: %s", last_backup) + + LOGGER.debug("Backups to delete: %s", filtered_backups) + + if not filtered_backups: + return + + backup_ids = list(filtered_backups) + delete_results = await asyncio.gather( + *(manager.async_delete_backup(backup_id) for backup_id in filtered_backups) + ) + agent_errors = { + backup_id: error + for backup_id, error in zip(backup_ids, delete_results, strict=True) + if error + } + if agent_errors: + LOGGER.error( + "Error deleting old copies: %s", + agent_errors, + ) diff --git a/homeassistant/components/backup/const.py b/homeassistant/components/backup/const.py index f613f7cc352..c2070a37b2d 100644 --- a/homeassistant/components/backup/const.py +++ b/homeassistant/components/backup/const.py @@ -10,6 +10,7 @@ from homeassistant.util.hass_dict import HassKey if TYPE_CHECKING: from .manager import BackupManager +BUF_SIZE = 2**20 * 4 # 4MB DOMAIN = "backup" DATA_MANAGER: HassKey[BackupManager] = HassKey(DOMAIN) LOGGER = getLogger(__package__) @@ -22,6 +23,12 @@ EXCLUDE_FROM_BACKUP = [ "*.log.*", "*.log", "backups/*.tar", + "tmp_backups/*.tar", "OZW_Log.txt", "tts/*", ] + +EXCLUDE_DATABASE_FROM_BACKUP = [ + "home-assistant_v2.db", + "home-assistant_v2.db-wal", +] diff --git a/homeassistant/components/backup/http.py b/homeassistant/components/backup/http.py index 42693035bd3..73a8c8eb602 100644 --- a/homeassistant/components/backup/http.py +++ b/homeassistant/components/backup/http.py @@ -8,10 +8,11 @@ from typing import cast from aiohttp import BodyPartReader from aiohttp.hdrs import CONTENT_DISPOSITION -from aiohttp.web import FileResponse, Request, Response +from aiohttp.web import FileResponse, Request, Response, StreamResponse from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError from homeassistant.util import slugify 
from .const import DATA_MANAGER @@ -27,30 +28,47 @@ def async_register_http_views(hass: HomeAssistant) -> None: class DownloadBackupView(HomeAssistantView): """Generate backup view.""" - url = "/api/backup/download/{slug}" + url = "/api/backup/download/{backup_id}" name = "api:backup:download" async def get( self, request: Request, - slug: str, - ) -> FileResponse | Response: + backup_id: str, + ) -> StreamResponse | FileResponse | Response: """Download a backup file.""" if not request["hass_user"].is_admin: return Response(status=HTTPStatus.UNAUTHORIZED) + try: + agent_id = request.query.getone("agent_id") + except KeyError: + return Response(status=HTTPStatus.BAD_REQUEST) manager = request.app[KEY_HASS].data[DATA_MANAGER] - backup = await manager.async_get_backup(slug=slug) + if agent_id not in manager.backup_agents: + return Response(status=HTTPStatus.BAD_REQUEST) + agent = manager.backup_agents[agent_id] + backup = await agent.async_get_backup(backup_id) - if backup is None or not backup.path.exists(): + # We don't need to check if the path exists, aiohttp.FileResponse will handle + # that + if backup is None: return Response(status=HTTPStatus.NOT_FOUND) - return FileResponse( - path=backup.path.as_posix(), - headers={ - CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar" - }, - ) + headers = { + CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar" + } + if agent_id in manager.local_backup_agents: + local_agent = manager.local_backup_agents[agent_id] + path = local_agent.get_backup_path(backup_id) + return FileResponse(path=path.as_posix(), headers=headers) + + stream = await agent.async_download_backup(backup_id) + response = StreamResponse(status=HTTPStatus.OK, headers=headers) + await response.prepare(request) + async for chunk in stream: + await response.write(chunk) + return response class UploadBackupView(HomeAssistantView): @@ -62,15 +80,24 @@ class UploadBackupView(HomeAssistantView): @require_admin async def 
post(self, request: Request) -> Response: """Upload a backup file.""" + try: + agent_ids = request.query.getall("agent_id") + except KeyError: + return Response(status=HTTPStatus.BAD_REQUEST) manager = request.app[KEY_HASS].data[DATA_MANAGER] reader = await request.multipart() contents = cast(BodyPartReader, await reader.next()) try: - await manager.async_receive_backup(contents=contents) + await manager.async_receive_backup(contents=contents, agent_ids=agent_ids) except OSError as err: return Response( - body=f"Can't write backup file {err}", + body=f"Can't write backup file: {err}", + status=HTTPStatus.INTERNAL_SERVER_ERROR, + ) + except HomeAssistantError as err: + return Response( + body=f"Can't upload backup file: {err}", status=HTTPStatus.INTERNAL_SERVER_ERROR, ) except asyncio.CancelledError: diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index 4300f75eed0..1defbd350fb 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -4,49 +4,181 @@ from __future__ import annotations import abc import asyncio -from dataclasses import asdict, dataclass +from collections.abc import AsyncIterator, Callable, Coroutine +from dataclasses import dataclass +from enum import StrEnum import hashlib import io import json from pathlib import Path -from queue import SimpleQueue import shutil import tarfile -from tarfile import TarError -from tempfile import TemporaryDirectory import time -from typing import Any, Protocol, cast +from typing import TYPE_CHECKING, Any, Protocol, TypedDict import aiohttp from securetar import SecureTarFile, atomic_contents_add -from homeassistant.backup_restore import RESTORE_BACKUP_FILE +from homeassistant.backup_restore import RESTORE_BACKUP_FILE, password_to_key from homeassistant.const import __version__ as HAVERSION from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from 
homeassistant.helpers import integration_platform from homeassistant.helpers.json import json_bytes from homeassistant.util import dt as dt_util -from homeassistant.util.json import json_loads_object -from .const import DOMAIN, EXCLUDE_FROM_BACKUP, LOGGER - -BUF_SIZE = 2**20 * 4 # 4MB +from .agent import ( + BackupAgent, + BackupAgentError, + BackupAgentPlatformProtocol, + LocalBackupAgent, +) +from .config import BackupConfig +from .const import ( + BUF_SIZE, + DATA_MANAGER, + DOMAIN, + EXCLUDE_DATABASE_FROM_BACKUP, + EXCLUDE_FROM_BACKUP, + LOGGER, +) +from .models import AgentBackup, Folder +from .store import BackupStore +from .util import make_backup_dir, read_backup -@dataclass(slots=True) -class Backup: +@dataclass(frozen=True, kw_only=True, slots=True) +class NewBackup: + """New backup class.""" + + backup_job_id: str + + +@dataclass(frozen=True, kw_only=True, slots=True) +class ManagerBackup(AgentBackup): """Backup class.""" - slug: str - name: str - date: str - path: Path - size: float + agent_ids: list[str] + failed_agent_ids: list[str] + with_strategy_settings: bool - def as_dict(self) -> dict: - """Return a dict representation of this backup.""" - return {**asdict(self), "path": self.path.as_posix()} + +@dataclass(frozen=True, kw_only=True, slots=True) +class WrittenBackup: + """Written backup class.""" + + backup: AgentBackup + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]] + release_stream: Callable[[], Coroutine[Any, Any, None]] + + +class BackupManagerState(StrEnum): + """Backup state type.""" + + IDLE = "idle" + CREATE_BACKUP = "create_backup" + RECEIVE_BACKUP = "receive_backup" + RESTORE_BACKUP = "restore_backup" + + +class CreateBackupStage(StrEnum): + """Create backup stage enum.""" + + ADDON_REPOSITORIES = "addon_repositories" + ADDONS = "addons" + AWAIT_ADDON_RESTARTS = "await_addon_restarts" + DOCKER_CONFIG = "docker_config" + FINISHING_FILE = "finishing_file" + FOLDERS = "folders" + HOME_ASSISTANT = "home_assistant" + 
UPLOAD_TO_AGENTS = "upload_to_agents" + + +class CreateBackupState(StrEnum): + """Create backup state enum.""" + + COMPLETED = "completed" + FAILED = "failed" + IN_PROGRESS = "in_progress" + + +class ReceiveBackupStage(StrEnum): + """Receive backup stage enum.""" + + RECEIVE_FILE = "receive_file" + UPLOAD_TO_AGENTS = "upload_to_agents" + + +class ReceiveBackupState(StrEnum): + """Receive backup state enum.""" + + COMPLETED = "completed" + FAILED = "failed" + IN_PROGRESS = "in_progress" + + +class RestoreBackupStage(StrEnum): + """Restore backup stage enum.""" + + ADDON_REPOSITORIES = "addon_repositories" + ADDONS = "addons" + AWAIT_ADDON_RESTARTS = "await_addon_restarts" + AWAIT_HOME_ASSISTANT_RESTART = "await_home_assistant_restart" + CHECK_HOME_ASSISTANT = "check_home_assistant" + DOCKER_CONFIG = "docker_config" + DOWNLOAD_FROM_AGENT = "download_from_agent" + FOLDERS = "folders" + HOME_ASSISTANT = "home_assistant" + REMOVE_DELTA_ADDONS = "remove_delta_addons" + + +class RestoreBackupState(StrEnum): + """Receive backup state enum.""" + + COMPLETED = "completed" + FAILED = "failed" + IN_PROGRESS = "in_progress" + + +@dataclass(frozen=True, kw_only=True, slots=True) +class ManagerStateEvent: + """Backup state class.""" + + manager_state: BackupManagerState + + +@dataclass(frozen=True, kw_only=True, slots=True) +class IdleEvent(ManagerStateEvent): + """Backup manager idle.""" + + manager_state: BackupManagerState = BackupManagerState.IDLE + + +@dataclass(frozen=True, kw_only=True, slots=True) +class CreateBackupEvent(ManagerStateEvent): + """Backup in progress.""" + + manager_state: BackupManagerState = BackupManagerState.CREATE_BACKUP + stage: CreateBackupStage | None + state: CreateBackupState + + +@dataclass(frozen=True, kw_only=True, slots=True) +class ReceiveBackupEvent(ManagerStateEvent): + """Backup receive.""" + + manager_state: BackupManagerState = BackupManagerState.RECEIVE_BACKUP + stage: ReceiveBackupStage | None + state: ReceiveBackupState + + 
+@dataclass(frozen=True, kw_only=True, slots=True) +class RestoreBackupEvent(ManagerStateEvent): + """Backup restore.""" + + manager_state: BackupManagerState = BackupManagerState.RESTORE_BACKUP + stage: RestoreBackupStage | None + state: RestoreBackupState class BackupPlatformProtocol(Protocol): @@ -59,40 +191,143 @@ class BackupPlatformProtocol(Protocol): """Perform operations after a backup finishes.""" -class BaseBackupManager(abc.ABC): +class BackupReaderWriter(abc.ABC): + """Abstract class for reading and writing backups.""" + + @abc.abstractmethod + async def async_create_backup( + self, + *, + agent_ids: list[str], + backup_name: str, + include_addons: list[str] | None, + include_all_addons: bool, + include_database: bool, + include_folders: list[Folder] | None, + include_homeassistant: bool, + on_progress: Callable[[ManagerStateEvent], None], + password: str | None, + ) -> tuple[NewBackup, asyncio.Task[WrittenBackup]]: + """Create a backup.""" + + @abc.abstractmethod + async def async_receive_backup( + self, + *, + agent_ids: list[str], + stream: AsyncIterator[bytes], + suggested_filename: str, + ) -> WrittenBackup: + """Receive a backup.""" + + @abc.abstractmethod + async def async_restore_backup( + self, + backup_id: str, + *, + agent_id: str, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + password: str | None, + restore_addons: list[str] | None, + restore_database: bool, + restore_folders: list[Folder] | None, + restore_homeassistant: bool, + ) -> None: + """Restore a backup.""" + + +class BackupManager: """Define the format that backup managers can have.""" - def __init__(self, hass: HomeAssistant) -> None: + def __init__(self, hass: HomeAssistant, reader_writer: BackupReaderWriter) -> None: """Initialize the backup manager.""" self.hass = hass - self.backing_up = False - self.backups: dict[str, Backup] = {} - self.loaded_platforms = False self.platforms: dict[str, BackupPlatformProtocol] = {} + self.backup_agents: dict[str, 
BackupAgent] = {} + self.local_backup_agents: dict[str, LocalBackupAgent] = {} + + self.config = BackupConfig(hass, self) + self._reader_writer = reader_writer + self.known_backups = KnownBackups(self) + self.store = BackupStore(hass, self) + + # Tasks and flags tracking backup and restore progress + self._backup_task: asyncio.Task[WrittenBackup] | None = None + self._backup_finish_task: asyncio.Task[None] | None = None + + # Backup schedule and retention listeners + self.remove_next_backup_event: Callable[[], None] | None = None + self.remove_next_delete_event: Callable[[], None] | None = None + + # Latest backup event and backup event subscribers + self.last_event: ManagerStateEvent = IdleEvent() + self._backup_event_subscriptions: list[Callable[[ManagerStateEvent], None]] = [] + + async def async_setup(self) -> None: + """Set up the backup manager.""" + stored = await self.store.load() + if stored: + self.config.load(stored["config"]) + self.known_backups.load(stored["backups"]) + + await self.load_platforms() + + @property + def state(self) -> BackupManagerState: + """Return the state of the backup manager.""" + return self.last_event.manager_state @callback - def _add_platform( + def _add_platform_pre_post_handler( self, - hass: HomeAssistant, integration_domain: str, platform: BackupPlatformProtocol, ) -> None: - """Add a platform to the backup manager.""" + """Add a backup platform.""" if not hasattr(platform, "async_pre_backup") or not hasattr( platform, "async_post_backup" ): - LOGGER.warning( - "%s does not implement required functions for the backup platform", - integration_domain, - ) return + self.platforms[integration_domain] = platform - async def async_pre_backup_actions(self, **kwargs: Any) -> None: - """Perform pre backup actions.""" - if not self.loaded_platforms: - await self.load_platforms() + async def _async_add_platform_agents( + self, + integration_domain: str, + platform: BackupAgentPlatformProtocol, + ) -> None: + """Add a platform to the 
backup manager.""" + if not hasattr(platform, "async_get_backup_agents"): + return + agents = await platform.async_get_backup_agents(self.hass) + self.backup_agents.update( + {f"{integration_domain}.{agent.name}": agent for agent in agents} + ) + self.local_backup_agents.update( + { + f"{integration_domain}.{agent.name}": agent + for agent in agents + if isinstance(agent, LocalBackupAgent) + } + ) + + async def _add_platform( + self, + hass: HomeAssistant, + integration_domain: str, + platform: Any, + ) -> None: + """Add a backup platform manager.""" + self._add_platform_pre_post_handler(integration_domain, platform) + await self._async_add_platform_agents(integration_domain, platform) + LOGGER.debug("Backup platform %s loaded", integration_domain) + LOGGER.debug("%s platforms loaded in total", len(self.platforms)) + LOGGER.debug("%s agents loaded in total", len(self.backup_agents)) + LOGGER.debug("%s local agents loaded in total", len(self.local_backup_agents)) + + async def async_pre_backup_actions(self) -> None: + """Perform pre backup actions.""" pre_backup_results = await asyncio.gather( *( platform.async_pre_backup(self.hass) @@ -104,11 +339,8 @@ class BaseBackupManager(abc.ABC): if isinstance(result, Exception): raise result - async def async_post_backup_actions(self, **kwargs: Any) -> None: + async def async_post_backup_actions(self) -> None: """Perform post backup actions.""" - if not self.loaded_platforms: - await self.load_platforms() - post_backup_results = await asyncio.gather( *( platform.async_post_backup(self.hass) @@ -123,226 +355,703 @@ class BaseBackupManager(abc.ABC): async def load_platforms(self) -> None: """Load backup platforms.""" await integration_platform.async_process_integration_platforms( - self.hass, DOMAIN, self._add_platform, wait_for_platforms=True + self.hass, + DOMAIN, + self._add_platform, + wait_for_platforms=True, ) LOGGER.debug("Loaded %s platforms", len(self.platforms)) - self.loaded_platforms = True + LOGGER.debug("Loaded 
%s agents", len(self.backup_agents)) - @abc.abstractmethod - async def async_restore_backup(self, slug: str, **kwargs: Any) -> None: - """Restore a backup.""" + async def _async_upload_backup( + self, + *, + backup: AgentBackup, + agent_ids: list[str], + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + ) -> dict[str, Exception]: + """Upload a backup to selected agents.""" + agent_errors: dict[str, Exception] = {} - @abc.abstractmethod - async def async_create_backup(self, **kwargs: Any) -> Backup: - """Generate a backup.""" + LOGGER.debug("Uploading backup %s to agents %s", backup.backup_id, agent_ids) - @abc.abstractmethod - async def async_get_backups(self, **kwargs: Any) -> dict[str, Backup]: + sync_backup_results = await asyncio.gather( + *( + self.backup_agents[agent_id].async_upload_backup( + open_stream=open_stream, + backup=backup, + ) + for agent_id in agent_ids + ), + return_exceptions=True, + ) + for idx, result in enumerate(sync_backup_results): + if isinstance(result, Exception): + agent_errors[agent_ids[idx]] = result + LOGGER.exception( + "Error during backup upload - %s", result, exc_info=result + ) + return agent_errors + + async def async_get_backups( + self, + ) -> tuple[dict[str, ManagerBackup], dict[str, Exception]]: """Get backups. - Return a dictionary of Backup instances keyed by their slug. + Return a dictionary of Backup instances keyed by their ID. 
""" + backups: dict[str, ManagerBackup] = {} + agent_errors: dict[str, Exception] = {} + agent_ids = list(self.backup_agents) - @abc.abstractmethod - async def async_get_backup(self, *, slug: str, **kwargs: Any) -> Backup | None: + list_backups_results = await asyncio.gather( + *(agent.async_list_backups() for agent in self.backup_agents.values()), + return_exceptions=True, + ) + for idx, result in enumerate(list_backups_results): + if isinstance(result, BackupAgentError): + agent_errors[agent_ids[idx]] = result + continue + if isinstance(result, BaseException): + raise result + for agent_backup in result: + if (backup_id := agent_backup.backup_id) not in backups: + if known_backup := self.known_backups.get(backup_id): + failed_agent_ids = known_backup.failed_agent_ids + with_strategy_settings = known_backup.with_strategy_settings + else: + failed_agent_ids = [] + with_strategy_settings = False + backups[backup_id] = ManagerBackup( + agent_ids=[], + addons=agent_backup.addons, + backup_id=backup_id, + date=agent_backup.date, + database_included=agent_backup.database_included, + failed_agent_ids=failed_agent_ids, + folders=agent_backup.folders, + homeassistant_included=agent_backup.homeassistant_included, + homeassistant_version=agent_backup.homeassistant_version, + name=agent_backup.name, + protected=agent_backup.protected, + size=agent_backup.size, + with_strategy_settings=with_strategy_settings, + ) + backups[backup_id].agent_ids.append(agent_ids[idx]) + + return (backups, agent_errors) + + async def async_get_backup( + self, backup_id: str + ) -> tuple[ManagerBackup | None, dict[str, Exception]]: """Get a backup.""" + backup: ManagerBackup | None = None + agent_errors: dict[str, Exception] = {} + agent_ids = list(self.backup_agents) - @abc.abstractmethod - async def async_remove_backup(self, *, slug: str, **kwargs: Any) -> None: - """Remove a backup.""" + get_backup_results = await asyncio.gather( + *( + agent.async_get_backup(backup_id) + for agent in 
self.backup_agents.values() + ), + return_exceptions=True, + ) + for idx, result in enumerate(get_backup_results): + if isinstance(result, BackupAgentError): + agent_errors[agent_ids[idx]] = result + continue + if isinstance(result, BaseException): + raise result + if not result: + continue + if backup is None: + if known_backup := self.known_backups.get(backup_id): + failed_agent_ids = known_backup.failed_agent_ids + with_strategy_settings = known_backup.with_strategy_settings + else: + failed_agent_ids = [] + with_strategy_settings = False + backup = ManagerBackup( + agent_ids=[], + addons=result.addons, + backup_id=result.backup_id, + date=result.date, + database_included=result.database_included, + failed_agent_ids=failed_agent_ids, + folders=result.folders, + homeassistant_included=result.homeassistant_included, + homeassistant_version=result.homeassistant_version, + name=result.name, + protected=result.protected, + size=result.size, + with_strategy_settings=with_strategy_settings, + ) + backup.agent_ids.append(agent_ids[idx]) + + return (backup, agent_errors) + + async def async_delete_backup(self, backup_id: str) -> dict[str, Exception]: + """Delete a backup.""" + agent_errors: dict[str, Exception] = {} + agent_ids = list(self.backup_agents) + + delete_backup_results = await asyncio.gather( + *( + agent.async_delete_backup(backup_id) + for agent in self.backup_agents.values() + ), + return_exceptions=True, + ) + for idx, result in enumerate(delete_backup_results): + if isinstance(result, BackupAgentError): + agent_errors[agent_ids[idx]] = result + continue + if isinstance(result, BaseException): + raise result + + if not agent_errors: + self.known_backups.remove(backup_id) + + return agent_errors - @abc.abstractmethod async def async_receive_backup( self, *, + agent_ids: list[str], contents: aiohttp.BodyPartReader, - **kwargs: Any, ) -> None: """Receive and store a backup file from upload.""" + if self.state is not BackupManagerState.IDLE: + raise 
HomeAssistantError(f"Backup manager busy: {self.state}") + self.async_on_backup_event( + ReceiveBackupEvent(stage=None, state=ReceiveBackupState.IN_PROGRESS) + ) + try: + await self._async_receive_backup(agent_ids=agent_ids, contents=contents) + except Exception: + self.async_on_backup_event( + ReceiveBackupEvent(stage=None, state=ReceiveBackupState.FAILED) + ) + raise + else: + self.async_on_backup_event( + ReceiveBackupEvent(stage=None, state=ReceiveBackupState.COMPLETED) + ) + finally: + self.async_on_backup_event(IdleEvent()) + + async def _async_receive_backup( + self, + *, + agent_ids: list[str], + contents: aiohttp.BodyPartReader, + ) -> None: + """Receive and store a backup file from upload.""" + contents.chunk_size = BUF_SIZE + self.async_on_backup_event( + ReceiveBackupEvent( + stage=ReceiveBackupStage.RECEIVE_FILE, + state=ReceiveBackupState.IN_PROGRESS, + ) + ) + written_backup = await self._reader_writer.async_receive_backup( + agent_ids=agent_ids, + stream=contents, + suggested_filename=contents.filename or "backup.tar", + ) + self.async_on_backup_event( + ReceiveBackupEvent( + stage=ReceiveBackupStage.UPLOAD_TO_AGENTS, + state=ReceiveBackupState.IN_PROGRESS, + ) + ) + agent_errors = await self._async_upload_backup( + backup=written_backup.backup, + agent_ids=agent_ids, + open_stream=written_backup.open_stream, + ) + await written_backup.release_stream() + self.known_backups.add(written_backup.backup, agent_errors, False) + + async def async_create_backup( + self, + *, + agent_ids: list[str], + include_addons: list[str] | None, + include_all_addons: bool, + include_database: bool, + include_folders: list[Folder] | None, + include_homeassistant: bool, + name: str | None, + password: str | None, + with_strategy_settings: bool = False, + ) -> NewBackup: + """Create a backup.""" + new_backup = await self.async_initiate_backup( + agent_ids=agent_ids, + include_addons=include_addons, + include_all_addons=include_all_addons, + 
include_database=include_database, + include_folders=include_folders, + include_homeassistant=include_homeassistant, + name=name, + password=password, + with_strategy_settings=with_strategy_settings, + ) + assert self._backup_finish_task + await self._backup_finish_task + return new_backup + + async def async_initiate_backup( + self, + *, + agent_ids: list[str], + include_addons: list[str] | None, + include_all_addons: bool, + include_database: bool, + include_folders: list[Folder] | None, + include_homeassistant: bool, + name: str | None, + password: str | None, + with_strategy_settings: bool = False, + ) -> NewBackup: + """Initiate generating a backup.""" + if self.state is not BackupManagerState.IDLE: + raise HomeAssistantError(f"Backup manager busy: {self.state}") + + if with_strategy_settings: + self.config.data.last_attempted_strategy_backup = dt_util.now() + self.store.save() + + self.async_on_backup_event( + CreateBackupEvent(stage=None, state=CreateBackupState.IN_PROGRESS) + ) + try: + return await self._async_create_backup( + agent_ids=agent_ids, + include_addons=include_addons, + include_all_addons=include_all_addons, + include_database=include_database, + include_folders=include_folders, + include_homeassistant=include_homeassistant, + name=name, + password=password, + with_strategy_settings=with_strategy_settings, + ) + except Exception: + self.async_on_backup_event( + CreateBackupEvent(stage=None, state=CreateBackupState.FAILED) + ) + self.async_on_backup_event(IdleEvent()) + raise + + async def _async_create_backup( + self, + *, + agent_ids: list[str], + include_addons: list[str] | None, + include_all_addons: bool, + include_database: bool, + include_folders: list[Folder] | None, + include_homeassistant: bool, + name: str | None, + password: str | None, + with_strategy_settings: bool, + ) -> NewBackup: + """Initiate generating a backup.""" + if not agent_ids: + raise HomeAssistantError("At least one agent must be selected") + if any(agent_id not in 
self.backup_agents for agent_id in agent_ids): + raise HomeAssistantError("Invalid agent selected") + if include_all_addons and include_addons: + raise HomeAssistantError( + "Cannot include all addons and specify specific addons" + ) + + backup_name = name or f"Core {HAVERSION}" + new_backup, self._backup_task = await self._reader_writer.async_create_backup( + agent_ids=agent_ids, + backup_name=backup_name, + include_addons=include_addons, + include_all_addons=include_all_addons, + include_database=include_database, + include_folders=include_folders, + include_homeassistant=include_homeassistant, + on_progress=self.async_on_backup_event, + password=password, + ) + self._backup_finish_task = self.hass.async_create_task( + self._async_finish_backup(agent_ids, with_strategy_settings), + name="backup_manager_finish_backup", + ) + return new_backup + + async def _async_finish_backup( + self, agent_ids: list[str], with_strategy_settings: bool + ) -> None: + if TYPE_CHECKING: + assert self._backup_task is not None + try: + written_backup = await self._backup_task + except Exception as err: # noqa: BLE001 + LOGGER.debug("Generating backup failed", exc_info=err) + self.async_on_backup_event( + CreateBackupEvent(stage=None, state=CreateBackupState.FAILED) + ) + else: + LOGGER.debug( + "Generated new backup with backup_id %s, uploading to agents %s", + written_backup.backup.backup_id, + agent_ids, + ) + self.async_on_backup_event( + CreateBackupEvent( + stage=CreateBackupStage.UPLOAD_TO_AGENTS, + state=CreateBackupState.IN_PROGRESS, + ) + ) + agent_errors = await self._async_upload_backup( + backup=written_backup.backup, + agent_ids=agent_ids, + open_stream=written_backup.open_stream, + ) + await written_backup.release_stream() + if with_strategy_settings: + # create backup was successful, update last_completed_strategy_backup + self.config.data.last_completed_strategy_backup = dt_util.now() + self.store.save() + self.known_backups.add( + written_backup.backup, agent_errors, 
with_strategy_settings + ) + self.async_on_backup_event( + CreateBackupEvent(stage=None, state=CreateBackupState.COMPLETED) + ) + finally: + self._backup_task = None + self._backup_finish_task = None + self.async_on_backup_event(IdleEvent()) + + async def async_restore_backup( + self, + backup_id: str, + *, + agent_id: str, + password: str | None, + restore_addons: list[str] | None, + restore_database: bool, + restore_folders: list[Folder] | None, + restore_homeassistant: bool, + ) -> None: + """Initiate restoring a backup.""" + if self.state is not BackupManagerState.IDLE: + raise HomeAssistantError(f"Backup manager busy: {self.state}") + + self.async_on_backup_event( + RestoreBackupEvent(stage=None, state=RestoreBackupState.IN_PROGRESS) + ) + try: + await self._async_restore_backup( + backup_id=backup_id, + agent_id=agent_id, + password=password, + restore_addons=restore_addons, + restore_database=restore_database, + restore_folders=restore_folders, + restore_homeassistant=restore_homeassistant, + ) + except Exception: + self.async_on_backup_event( + RestoreBackupEvent(stage=None, state=RestoreBackupState.FAILED) + ) + raise + finally: + self.async_on_backup_event(IdleEvent()) + + async def _async_restore_backup( + self, + backup_id: str, + *, + agent_id: str, + password: str | None, + restore_addons: list[str] | None, + restore_database: bool, + restore_folders: list[Folder] | None, + restore_homeassistant: bool, + ) -> None: + """Initiate restoring a backup.""" + agent = self.backup_agents[agent_id] + if not await agent.async_get_backup(backup_id): + raise HomeAssistantError( + f"Backup {backup_id} not found in agent {agent_id}" + ) + + async def open_backup() -> AsyncIterator[bytes]: + return await agent.async_download_backup(backup_id) + + await self._reader_writer.async_restore_backup( + backup_id=backup_id, + open_stream=open_backup, + agent_id=agent_id, + password=password, + restore_addons=restore_addons, + restore_database=restore_database, + 
restore_folders=restore_folders, + restore_homeassistant=restore_homeassistant, + ) + + @callback + def async_on_backup_event( + self, + event: ManagerStateEvent, + ) -> None: + """Forward event to subscribers.""" + if (current_state := self.state) != (new_state := event.manager_state): + LOGGER.debug("Backup state: %s -> %s", current_state, new_state) + self.last_event = event + for subscription in self._backup_event_subscriptions: + subscription(event) + + @callback + def async_subscribe_events( + self, + on_event: Callable[[ManagerStateEvent], None], + ) -> Callable[[], None]: + """Subscribe events.""" + + def remove_subscription() -> None: + self._backup_event_subscriptions.remove(on_event) + + self._backup_event_subscriptions.append(on_event) + return remove_subscription -class BackupManager(BaseBackupManager): - """Backup manager for the Backup integration.""" +class KnownBackups: + """Track known backups.""" + + def __init__(self, manager: BackupManager) -> None: + """Initialize.""" + self._backups: dict[str, KnownBackup] = {} + self._manager = manager + + def load(self, stored_backups: list[StoredKnownBackup]) -> None: + """Load backups.""" + self._backups = { + backup["backup_id"]: KnownBackup( + backup_id=backup["backup_id"], + failed_agent_ids=backup["failed_agent_ids"], + with_strategy_settings=backup["with_strategy_settings"], + ) + for backup in stored_backups + } + + def to_list(self) -> list[StoredKnownBackup]: + """Convert known backups to a dict.""" + return [backup.to_dict() for backup in self._backups.values()] + + def add( + self, + backup: AgentBackup, + agent_errors: dict[str, Exception], + with_strategy_settings: bool, + ) -> None: + """Add a backup.""" + self._backups[backup.backup_id] = KnownBackup( + backup_id=backup.backup_id, + failed_agent_ids=list(agent_errors), + with_strategy_settings=with_strategy_settings, + ) + self._manager.store.save() + + def get(self, backup_id: str) -> KnownBackup | None: + """Get a backup.""" + return 
self._backups.get(backup_id) + + def remove(self, backup_id: str) -> None: + """Remove a backup.""" + if backup_id not in self._backups: + return + self._backups.pop(backup_id) + self._manager.store.save() + + +@dataclass(kw_only=True) +class KnownBackup: + """Persistent backup data.""" + + backup_id: str + failed_agent_ids: list[str] + with_strategy_settings: bool + + def to_dict(self) -> StoredKnownBackup: + """Convert known backup to a dict.""" + return { + "backup_id": self.backup_id, + "failed_agent_ids": self.failed_agent_ids, + "with_strategy_settings": self.with_strategy_settings, + } + + +class StoredKnownBackup(TypedDict): + """Stored persistent backup data.""" + + backup_id: str + failed_agent_ids: list[str] + with_strategy_settings: bool + + +class CoreBackupReaderWriter(BackupReaderWriter): + """Class for reading and writing backups in core and container installations.""" + + _local_agent_id = f"{DOMAIN}.local" def __init__(self, hass: HomeAssistant) -> None: - """Initialize the backup manager.""" - super().__init__(hass=hass) - self.backup_dir = Path(hass.config.path("backups")) - self.loaded_backups = False + """Initialize the backup reader/writer.""" + self._hass = hass + self.temp_backup_dir = Path(hass.config.path("tmp_backups")) - async def load_backups(self) -> None: - """Load data of stored backup files.""" - backups = await self.hass.async_add_executor_job(self._read_backups) - LOGGER.debug("Loaded %s backups", len(backups)) - self.backups = backups - self.loaded_backups = True - - def _read_backups(self) -> dict[str, Backup]: - """Read backups from disk.""" - backups: dict[str, Backup] = {} - for backup_path in self.backup_dir.glob("*.tar"): - try: - with tarfile.open(backup_path, "r:", bufsize=BUF_SIZE) as backup_file: - if data_file := backup_file.extractfile("./backup.json"): - data = json_loads_object(data_file.read()) - backup = Backup( - slug=cast(str, data["slug"]), - name=cast(str, data["name"]), - date=cast(str, data["date"]), - 
path=backup_path, - size=round(backup_path.stat().st_size / 1_048_576, 2), - ) - backups[backup.slug] = backup - except (OSError, TarError, json.JSONDecodeError, KeyError) as err: - LOGGER.warning("Unable to read backup %s: %s", backup_path, err) - return backups - - async def async_get_backups(self, **kwargs: Any) -> dict[str, Backup]: - """Return backups.""" - if not self.loaded_backups: - await self.load_backups() - - return self.backups - - async def async_get_backup(self, *, slug: str, **kwargs: Any) -> Backup | None: - """Return a backup.""" - if not self.loaded_backups: - await self.load_backups() - - if not (backup := self.backups.get(slug)): - return None - - if not backup.path.exists(): - LOGGER.debug( - ( - "Removing tracked backup (%s) that does not exists on the expected" - " path %s" - ), - backup.slug, - backup.path, - ) - self.backups.pop(slug) - return None - - return backup - - async def async_remove_backup(self, *, slug: str, **kwargs: Any) -> None: - """Remove a backup.""" - if (backup := await self.async_get_backup(slug=slug)) is None: - return - - await self.hass.async_add_executor_job(backup.path.unlink, True) - LOGGER.debug("Removed backup located at %s", backup.path) - self.backups.pop(slug) - - async def async_receive_backup( + async def async_create_backup( self, *, - contents: aiohttp.BodyPartReader, - **kwargs: Any, - ) -> None: - """Receive and store a backup file from upload.""" - queue: SimpleQueue[tuple[bytes, asyncio.Future[None] | None] | None] = ( - SimpleQueue() - ) - temp_dir_handler = await self.hass.async_add_executor_job(TemporaryDirectory) - target_temp_file = Path( - temp_dir_handler.name, contents.filename or "backup.tar" + agent_ids: list[str], + backup_name: str, + include_addons: list[str] | None, + include_all_addons: bool, + include_database: bool, + include_folders: list[Folder] | None, + include_homeassistant: bool, + on_progress: Callable[[ManagerStateEvent], None], + password: str | None, + ) -> tuple[NewBackup, 
asyncio.Task[WrittenBackup]]: + """Initiate generating a backup.""" + date_str = dt_util.now().isoformat() + backup_id = _generate_backup_id(date_str, backup_name) + + if include_addons or include_all_addons or include_folders: + raise HomeAssistantError( + "Addons and folders are not supported by core backup" + ) + if not include_homeassistant: + raise HomeAssistantError("Home Assistant must be included in backup") + + backup_task = self._hass.async_create_task( + self._async_create_backup( + agent_ids=agent_ids, + backup_id=backup_id, + backup_name=backup_name, + include_database=include_database, + date_str=date_str, + on_progress=on_progress, + password=password, + ), + name="backup_manager_create_backup", + eager_start=False, # To ensure the task is not started before we return ) - def _sync_queue_consumer() -> None: - with target_temp_file.open("wb") as file_handle: - while True: - if (_chunk_future := queue.get()) is None: - break - _chunk, _future = _chunk_future - if _future is not None: - self.hass.loop.call_soon_threadsafe(_future.set_result, None) - file_handle.write(_chunk) + return (NewBackup(backup_job_id=backup_id), backup_task) - fut: asyncio.Future[None] | None = None - try: - fut = self.hass.async_add_executor_job(_sync_queue_consumer) - megabytes_sending = 0 - while chunk := await contents.read_chunk(BUF_SIZE): - megabytes_sending += 1 - if megabytes_sending % 5 != 0: - queue.put_nowait((chunk, None)) - continue - - chunk_future = self.hass.loop.create_future() - queue.put_nowait((chunk, chunk_future)) - await asyncio.wait( - (fut, chunk_future), - return_when=asyncio.FIRST_COMPLETED, - ) - if fut.done(): - # The executor job failed - break - - queue.put_nowait(None) # terminate queue consumer - finally: - if fut is not None: - await fut - - def _move_and_cleanup() -> None: - shutil.move(target_temp_file, self.backup_dir / target_temp_file.name) - temp_dir_handler.cleanup() - - await self.hass.async_add_executor_job(_move_and_cleanup) - await 
self.load_backups() - - async def async_create_backup(self, **kwargs: Any) -> Backup: + async def _async_create_backup( + self, + *, + agent_ids: list[str], + backup_id: str, + backup_name: str, + date_str: str, + include_database: bool, + on_progress: Callable[[ManagerStateEvent], None], + password: str | None, + ) -> WrittenBackup: """Generate a backup.""" - if self.backing_up: - raise HomeAssistantError("Backup already in progress") + manager = self._hass.data[DATA_MANAGER] + local_agent_tar_file_path = None + if self._local_agent_id in agent_ids: + local_agent = manager.local_backup_agents[self._local_agent_id] + local_agent_tar_file_path = local_agent.get_backup_path(backup_id) + + on_progress( + CreateBackupEvent( + stage=CreateBackupStage.HOME_ASSISTANT, + state=CreateBackupState.IN_PROGRESS, + ) + ) try: - self.backing_up = True - await self.async_pre_backup_actions() - backup_name = f"Core {HAVERSION}" - date_str = dt_util.now().isoformat() - slug = _generate_slug(date_str, backup_name) + # Inform integrations a backup is about to be made + await manager.async_pre_backup_actions() backup_data = { - "slug": slug, - "name": backup_name, - "date": date_str, - "type": "partial", - "folders": ["homeassistant"], - "homeassistant": {"version": HAVERSION}, "compressed": True, + "date": date_str, + "homeassistant": { + "exclude_database": not include_database, + "version": HAVERSION, + }, + "name": backup_name, + "protected": password is not None, + "slug": backup_id, + "type": "partial", + "version": 2, } - tar_file_path = Path(self.backup_dir, f"{backup_data['slug']}.tar") - size_in_bytes = await self.hass.async_add_executor_job( + + tar_file_path, size_in_bytes = await self._hass.async_add_executor_job( self._mkdir_and_generate_backup_contents, - tar_file_path, backup_data, + include_database, + password, + local_agent_tar_file_path, ) - backup = Backup( - slug=slug, - name=backup_name, + backup = AgentBackup( + addons=[], + backup_id=backup_id, + 
database_included=include_database, date=date_str, - path=tar_file_path, - size=round(size_in_bytes / 1_048_576, 2), + folders=[], + homeassistant_included=True, + homeassistant_version=HAVERSION, + name=backup_name, + protected=password is not None, + size=size_in_bytes, + ) + + async_add_executor_job = self._hass.async_add_executor_job + + async def send_backup() -> AsyncIterator[bytes]: + f = await async_add_executor_job(tar_file_path.open, "rb") + try: + while chunk := await async_add_executor_job(f.read, 2**20): + yield chunk + finally: + await async_add_executor_job(f.close) + + async def open_backup() -> AsyncIterator[bytes]: + return send_backup() + + async def remove_backup() -> None: + if local_agent_tar_file_path: + return + await async_add_executor_job(tar_file_path.unlink, True) + + return WrittenBackup( + backup=backup, open_stream=open_backup, release_stream=remove_backup ) - if self.loaded_backups: - self.backups[slug] = backup - LOGGER.debug("Generated new backup with slug %s", slug) - return backup finally: - self.backing_up = False - await self.async_post_backup_actions() + # Inform integrations the backup is done + await manager.async_post_backup_actions() def _mkdir_and_generate_backup_contents( self, - tar_file_path: Path, backup_data: dict[str, Any], - ) -> int: + database_included: bool, + password: str | None, + tar_file_path: Path | None, + ) -> tuple[Path, int]: """Generate backup contents and return the size.""" - if not self.backup_dir.exists(): - LOGGER.debug("Creating backup directory") - self.backup_dir.mkdir() + if not tar_file_path: + tar_file_path = self.temp_backup_dir / f"{backup_data['slug']}.tar" + make_backup_dir(tar_file_path.parent) + + excludes = EXCLUDE_FROM_BACKUP + if not database_included: + excludes = excludes + EXCLUDE_DATABASE_FROM_BACKUP outer_secure_tarfile = SecureTarFile( tar_file_path, "w", gzip=False, bufsize=BUF_SIZE @@ -355,37 +1064,136 @@ class BackupManager(BaseBackupManager): tar_info.mtime = 
int(time.time()) outer_secure_tarfile_tarfile.addfile(tar_info, fileobj=fileobj) with outer_secure_tarfile.create_inner_tar( - "./homeassistant.tar.gz", gzip=True + "./homeassistant.tar.gz", + gzip=True, + key=password_to_key(password) if password is not None else None, ) as core_tar: atomic_contents_add( tar_file=core_tar, - origin_path=Path(self.hass.config.path()), - excludes=EXCLUDE_FROM_BACKUP, + origin_path=Path(self._hass.config.path()), + excludes=excludes, arcname="data", ) + return (tar_file_path, tar_file_path.stat().st_size) - return tar_file_path.stat().st_size + async def async_receive_backup( + self, + *, + agent_ids: list[str], + stream: AsyncIterator[bytes], + suggested_filename: str, + ) -> WrittenBackup: + """Receive a backup.""" + temp_file = Path(self.temp_backup_dir, suggested_filename) - async def async_restore_backup(self, slug: str, **kwargs: Any) -> None: + async_add_executor_job = self._hass.async_add_executor_job + await async_add_executor_job(make_backup_dir, self.temp_backup_dir) + f = await async_add_executor_job(temp_file.open, "wb") + try: + async for chunk in stream: + await async_add_executor_job(f.write, chunk) + finally: + await async_add_executor_job(f.close) + + try: + backup = await async_add_executor_job(read_backup, temp_file) + except (OSError, tarfile.TarError, json.JSONDecodeError, KeyError) as err: + LOGGER.warning("Unable to parse backup %s: %s", temp_file, err) + raise + + manager = self._hass.data[DATA_MANAGER] + if self._local_agent_id in agent_ids: + local_agent = manager.local_backup_agents[self._local_agent_id] + tar_file_path = local_agent.get_backup_path(backup.backup_id) + await async_add_executor_job(shutil.move, temp_file, tar_file_path) + else: + tar_file_path = temp_file + + async def send_backup() -> AsyncIterator[bytes]: + f = await async_add_executor_job(tar_file_path.open, "rb") + try: + while chunk := await async_add_executor_job(f.read, 2**20): + yield chunk + finally: + await 
async_add_executor_job(f.close) + + async def open_backup() -> AsyncIterator[bytes]: + return send_backup() + + async def remove_backup() -> None: + if self._local_agent_id in agent_ids: + return + await async_add_executor_job(temp_file.unlink, True) + + return WrittenBackup( + backup=backup, open_stream=open_backup, release_stream=remove_backup + ) + + async def async_restore_backup( + self, + backup_id: str, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + *, + agent_id: str, + password: str | None, + restore_addons: list[str] | None, + restore_database: bool, + restore_folders: list[Folder] | None, + restore_homeassistant: bool, + ) -> None: """Restore a backup. This will write the restore information to .HA_RESTORE which will be handled during startup by the restore_backup module. """ - if (backup := await self.async_get_backup(slug=slug)) is None: - raise HomeAssistantError(f"Backup {slug} not found") + + if restore_addons or restore_folders: + raise HomeAssistantError( + "Addons and folders are not supported in core restore" + ) + if not restore_homeassistant and not restore_database: + raise HomeAssistantError( + "Home Assistant or database must be included in restore" + ) + + manager = self._hass.data[DATA_MANAGER] + if agent_id in manager.local_backup_agents: + local_agent = manager.local_backup_agents[agent_id] + path = local_agent.get_backup_path(backup_id) + remove_after_restore = False + else: + async_add_executor_job = self._hass.async_add_executor_job + path = self.temp_backup_dir / f"{backup_id}.tar" + stream = await open_stream() + await async_add_executor_job(make_backup_dir, self.temp_backup_dir) + f = await async_add_executor_job(path.open, "wb") + try: + async for chunk in stream: + await async_add_executor_job(f.write, chunk) + finally: + await async_add_executor_job(f.close) + + remove_after_restore = True def _write_restore_file() -> None: """Write the restore file.""" - 
Path(self.hass.config.path(RESTORE_BACKUP_FILE)).write_text( - json.dumps({"path": backup.path.as_posix()}), + Path(self._hass.config.path(RESTORE_BACKUP_FILE)).write_text( + json.dumps( + { + "path": path.as_posix(), + "password": password, + "remove_after_restore": remove_after_restore, + "restore_database": restore_database, + "restore_homeassistant": restore_homeassistant, + } + ), encoding="utf-8", ) - await self.hass.async_add_executor_job(_write_restore_file) - await self.hass.services.async_call("homeassistant", "restart", {}) + await self._hass.async_add_executor_job(_write_restore_file) + await self._hass.services.async_call("homeassistant", "restart", {}) -def _generate_slug(date: str, name: str) -> str: - """Generate a backup slug.""" +def _generate_backup_id(date: str, name: str) -> str: + """Generate a backup ID.""" return hashlib.sha1(f"{date} - {name}".lower().encode()).hexdigest()[:8] diff --git a/homeassistant/components/backup/manifest.json b/homeassistant/components/backup/manifest.json index 0a906bb6dfa..b399043e013 100644 --- a/homeassistant/components/backup/manifest.json +++ b/homeassistant/components/backup/manifest.json @@ -1,11 +1,12 @@ { "domain": "backup", "name": "Backup", + "after_dependencies": ["hassio"], "codeowners": ["@home-assistant/core"], "dependencies": ["http", "websocket_api"], "documentation": "https://www.home-assistant.io/integrations/backup", "integration_type": "system", "iot_class": "calculated", "quality_scale": "internal", - "requirements": ["securetar==2024.11.0"] + "requirements": ["cronsim==2.6", "securetar==2024.11.0"] } diff --git a/homeassistant/components/backup/models.py b/homeassistant/components/backup/models.py new file mode 100644 index 00000000000..6306d9f1fec --- /dev/null +++ b/homeassistant/components/backup/models.py @@ -0,0 +1,61 @@ +"""Models for the backup integration.""" + +from __future__ import annotations + +from dataclasses import asdict, dataclass +from enum import StrEnum +from typing 
import Any, Self + + +@dataclass(frozen=True, kw_only=True) +class AddonInfo: + """Addon information.""" + + name: str + slug: str + version: str + + +class Folder(StrEnum): + """Folder type.""" + + SHARE = "share" + ADDONS = "addons/local" + SSL = "ssl" + MEDIA = "media" + + +@dataclass(frozen=True, kw_only=True) +class AgentBackup: + """Base backup class.""" + + addons: list[AddonInfo] + backup_id: str + date: str + database_included: bool + folders: list[Folder] + homeassistant_included: bool + homeassistant_version: str | None # None if homeassistant_included is False + name: str + protected: bool + size: int + + def as_dict(self) -> dict: + """Return a dict representation of this backup.""" + return asdict(self) + + @classmethod + def from_dict(cls, data: dict[str, Any]) -> Self: + """Create an instance from a JSON serialization.""" + return cls( + addons=[AddonInfo(**addon) for addon in data["addons"]], + backup_id=data["backup_id"], + date=data["date"], + database_included=data["database_included"], + folders=[Folder(folder) for folder in data["folders"]], + homeassistant_included=data["homeassistant_included"], + homeassistant_version=data["homeassistant_version"], + name=data["name"], + protected=data["protected"], + size=data["size"], + ) diff --git a/homeassistant/components/backup/store.py b/homeassistant/components/backup/store.py new file mode 100644 index 00000000000..ddabead24f9 --- /dev/null +++ b/homeassistant/components/backup/store.py @@ -0,0 +1,52 @@ +"""Store backup configuration.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, TypedDict + +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.storage import Store + +from .const import DOMAIN + +if TYPE_CHECKING: + from .config import StoredBackupConfig + from .manager import BackupManager, StoredKnownBackup + +STORE_DELAY_SAVE = 30 +STORAGE_KEY = DOMAIN +STORAGE_VERSION = 1 + + +class StoredBackupData(TypedDict): + """Represent the 
stored backup config.""" + + backups: list[StoredKnownBackup] + config: StoredBackupConfig + + +class BackupStore: + """Store backup config.""" + + def __init__(self, hass: HomeAssistant, manager: BackupManager) -> None: + """Initialize the backup manager.""" + self._hass = hass + self._manager = manager + self._store: Store[StoredBackupData] = Store(hass, STORAGE_VERSION, STORAGE_KEY) + + async def load(self) -> StoredBackupData | None: + """Load the store.""" + return await self._store.async_load() + + @callback + def save(self) -> None: + """Save config.""" + self._store.async_delay_save(self._data_to_save, STORE_DELAY_SAVE) + + @callback + def _data_to_save(self) -> StoredBackupData: + """Return data to save.""" + return { + "backups": self._manager.known_backups.to_list(), + "config": self._manager.config.data.to_dict(), + } diff --git a/homeassistant/components/backup/util.py b/homeassistant/components/backup/util.py new file mode 100644 index 00000000000..1d8252cc30b --- /dev/null +++ b/homeassistant/components/backup/util.py @@ -0,0 +1,111 @@ +"""Local backup support for Core and Container installations.""" + +from __future__ import annotations + +import asyncio +from pathlib import Path +from queue import SimpleQueue +import tarfile +from typing import cast + +import aiohttp + +from homeassistant.core import HomeAssistant +from homeassistant.util.json import JsonObjectType, json_loads_object + +from .const import BUF_SIZE +from .models import AddonInfo, AgentBackup, Folder + + +def make_backup_dir(path: Path) -> None: + """Create a backup directory if it does not exist.""" + path.mkdir(exist_ok=True) + + +def read_backup(backup_path: Path) -> AgentBackup: + """Read a backup from disk.""" + + with tarfile.open(backup_path, "r:", bufsize=BUF_SIZE) as backup_file: + if not (data_file := backup_file.extractfile("./backup.json")): + raise KeyError("backup.json not found in tar file") + data = json_loads_object(data_file.read()) + addons = [ + AddonInfo( + 
name=cast(str, addon["name"]), + slug=cast(str, addon["slug"]), + version=cast(str, addon["version"]), + ) + for addon in cast(list[JsonObjectType], data.get("addons", [])) + ] + + folders = [ + Folder(folder) + for folder in cast(list[str], data.get("folders", [])) + if folder != "homeassistant" + ] + + homeassistant_included = False + homeassistant_version: str | None = None + database_included = False + if ( + homeassistant := cast(JsonObjectType, data.get("homeassistant")) + ) and "version" in homeassistant: + homeassistant_version = cast(str, homeassistant["version"]) + database_included = not cast( + bool, homeassistant.get("exclude_database", False) + ) + + return AgentBackup( + addons=addons, + backup_id=cast(str, data["slug"]), + database_included=database_included, + date=cast(str, data["date"]), + folders=folders, + homeassistant_included=homeassistant_included, + homeassistant_version=homeassistant_version, + name=cast(str, data["name"]), + protected=cast(bool, data.get("protected", False)), + size=backup_path.stat().st_size, + ) + + +async def receive_file( + hass: HomeAssistant, contents: aiohttp.BodyPartReader, path: Path +) -> None: + """Receive a file from a stream and write it to a file.""" + queue: SimpleQueue[tuple[bytes, asyncio.Future[None] | None] | None] = SimpleQueue() + + def _sync_queue_consumer() -> None: + with path.open("wb") as file_handle: + while True: + if (_chunk_future := queue.get()) is None: + break + _chunk, _future = _chunk_future + if _future is not None: + hass.loop.call_soon_threadsafe(_future.set_result, None) + file_handle.write(_chunk) + + fut: asyncio.Future[None] | None = None + try: + fut = hass.async_add_executor_job(_sync_queue_consumer) + megabytes_sending = 0 + while chunk := await contents.read_chunk(BUF_SIZE): + megabytes_sending += 1 + if megabytes_sending % 5 != 0: + queue.put_nowait((chunk, None)) + continue + + chunk_future = hass.loop.create_future() + queue.put_nowait((chunk, chunk_future)) + await 
asyncio.wait( + (fut, chunk_future), + return_when=asyncio.FIRST_COMPLETED, + ) + if fut.done(): + # The executor job failed + break + + queue.put_nowait(None) # terminate queue consumer + finally: + if fut is not None: + await fut diff --git a/homeassistant/components/backup/websocket.py b/homeassistant/components/backup/websocket.py index 3ac8a7ace3e..7dacc39f9ba 100644 --- a/homeassistant/components/backup/websocket.py +++ b/homeassistant/components/backup/websocket.py @@ -7,22 +7,31 @@ import voluptuous as vol from homeassistant.components import websocket_api from homeassistant.core import HomeAssistant, callback +from .config import ScheduleState from .const import DATA_MANAGER, LOGGER +from .manager import ManagerStateEvent +from .models import Folder @callback def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) -> None: """Register websocket commands.""" + websocket_api.async_register_command(hass, backup_agents_info) + if with_hassio: websocket_api.async_register_command(hass, handle_backup_end) websocket_api.async_register_command(hass, handle_backup_start) - return websocket_api.async_register_command(hass, handle_details) websocket_api.async_register_command(hass, handle_info) websocket_api.async_register_command(hass, handle_create) - websocket_api.async_register_command(hass, handle_remove) + websocket_api.async_register_command(hass, handle_create_with_strategy_settings) + websocket_api.async_register_command(hass, handle_delete) websocket_api.async_register_command(hass, handle_restore) + websocket_api.async_register_command(hass, handle_subscribe_events) + + websocket_api.async_register_command(hass, handle_config_info) + websocket_api.async_register_command(hass, handle_config_update) @websocket_api.require_admin @@ -35,12 +44,16 @@ async def handle_info( ) -> None: """List all stored backups.""" manager = hass.data[DATA_MANAGER] - backups = await manager.async_get_backups() + backups, agent_errors = await 
manager.async_get_backups() connection.send_result( msg["id"], { + "agent_errors": { + agent_id: str(err) for agent_id, err in agent_errors.items() + }, "backups": list(backups.values()), - "backing_up": manager.backing_up, + "last_attempted_strategy_backup": manager.config.data.last_attempted_strategy_backup, + "last_completed_strategy_backup": manager.config.data.last_completed_strategy_backup, }, ) @@ -49,7 +62,7 @@ async def handle_info( @websocket_api.websocket_command( { vol.Required("type"): "backup/details", - vol.Required("slug"): str, + vol.Required("backup_id"): str, } ) @websocket_api.async_response @@ -58,11 +71,16 @@ async def handle_details( connection: websocket_api.ActiveConnection, msg: dict[str, Any], ) -> None: - """Get backup details for a specific slug.""" - backup = await hass.data[DATA_MANAGER].async_get_backup(slug=msg["slug"]) + """Get backup details for a specific backup.""" + backup, agent_errors = await hass.data[DATA_MANAGER].async_get_backup( + msg["backup_id"] + ) connection.send_result( msg["id"], { + "agent_errors": { + agent_id: str(err) for agent_id, err in agent_errors.items() + }, "backup": backup, }, ) @@ -71,26 +89,39 @@ async def handle_details( @websocket_api.require_admin @websocket_api.websocket_command( { - vol.Required("type"): "backup/remove", - vol.Required("slug"): str, + vol.Required("type"): "backup/delete", + vol.Required("backup_id"): str, } ) @websocket_api.async_response -async def handle_remove( +async def handle_delete( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any], ) -> None: - """Remove a backup.""" - await hass.data[DATA_MANAGER].async_remove_backup(slug=msg["slug"]) - connection.send_result(msg["id"]) + """Delete a backup.""" + agent_errors = await hass.data[DATA_MANAGER].async_delete_backup(msg["backup_id"]) + connection.send_result( + msg["id"], + { + "agent_errors": { + agent_id: str(err) for agent_id, err in agent_errors.items() + } + }, + ) 
@websocket_api.require_admin @websocket_api.websocket_command( { vol.Required("type"): "backup/restore", - vol.Required("slug"): str, + vol.Required("backup_id"): str, + vol.Required("agent_id"): str, + vol.Optional("password"): str, + vol.Optional("restore_addons"): [str], + vol.Optional("restore_database", default=True): bool, + vol.Optional("restore_folders"): [vol.Coerce(Folder)], + vol.Optional("restore_homeassistant", default=True): bool, } ) @websocket_api.async_response @@ -100,12 +131,32 @@ async def handle_restore( msg: dict[str, Any], ) -> None: """Restore a backup.""" - await hass.data[DATA_MANAGER].async_restore_backup(msg["slug"]) + await hass.data[DATA_MANAGER].async_restore_backup( + msg["backup_id"], + agent_id=msg["agent_id"], + password=msg.get("password"), + restore_addons=msg.get("restore_addons"), + restore_database=msg["restore_database"], + restore_folders=msg.get("restore_folders"), + restore_homeassistant=msg["restore_homeassistant"], + ) connection.send_result(msg["id"]) @websocket_api.require_admin -@websocket_api.websocket_command({vol.Required("type"): "backup/generate"}) +@websocket_api.websocket_command( + { + vol.Required("type"): "backup/generate", + vol.Required("agent_ids"): [str], + vol.Optional("include_addons"): [str], + vol.Optional("include_all_addons", default=False): bool, + vol.Optional("include_database", default=True): bool, + vol.Optional("include_folders"): [vol.Coerce(Folder)], + vol.Optional("include_homeassistant", default=True): bool, + vol.Optional("name"): str, + vol.Optional("password"): str, + } +) @websocket_api.async_response async def handle_create( hass: HomeAssistant, @@ -113,7 +164,46 @@ async def handle_create( msg: dict[str, Any], ) -> None: """Generate a backup.""" - backup = await hass.data[DATA_MANAGER].async_create_backup() + + backup = await hass.data[DATA_MANAGER].async_initiate_backup( + agent_ids=msg["agent_ids"], + include_addons=msg.get("include_addons"), + 
include_all_addons=msg["include_all_addons"], + include_database=msg["include_database"], + include_folders=msg.get("include_folders"), + include_homeassistant=msg["include_homeassistant"], + name=msg.get("name"), + password=msg.get("password"), + ) + connection.send_result(msg["id"], backup) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "backup/generate_with_strategy_settings", + } +) +@websocket_api.async_response +async def handle_create_with_strategy_settings( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Generate a backup with stored settings.""" + + config_data = hass.data[DATA_MANAGER].config.data + backup = await hass.data[DATA_MANAGER].async_initiate_backup( + agent_ids=config_data.create_backup.agent_ids, + include_addons=config_data.create_backup.include_addons, + include_all_addons=config_data.create_backup.include_all_addons, + include_database=config_data.create_backup.include_database, + include_folders=config_data.create_backup.include_folders, + include_homeassistant=True, # always include HA + name=config_data.create_backup.name, + password=config_data.create_backup.password, + with_strategy_settings=True, + ) connection.send_result(msg["id"], backup) @@ -127,7 +217,6 @@ async def handle_backup_start( ) -> None: """Backup start notification.""" manager = hass.data[DATA_MANAGER] - manager.backing_up = True LOGGER.debug("Backup start notification") try: @@ -149,7 +238,6 @@ async def handle_backup_end( ) -> None: """Backup end notification.""" manager = hass.data[DATA_MANAGER] - manager.backing_up = False LOGGER.debug("Backup end notification") try: @@ -159,3 +247,97 @@ async def handle_backup_end( return connection.send_result(msg["id"]) + + +@websocket_api.require_admin +@websocket_api.websocket_command({vol.Required("type"): "backup/agents/info"}) +@websocket_api.async_response +async def backup_agents_info( + hass: HomeAssistant, + 
connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Return backup agents info.""" + manager = hass.data[DATA_MANAGER] + connection.send_result( + msg["id"], + { + "agents": [{"agent_id": agent_id} for agent_id in manager.backup_agents], + }, + ) + + +@websocket_api.require_admin +@websocket_api.websocket_command({vol.Required("type"): "backup/config/info"}) +@websocket_api.async_response +async def handle_config_info( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Send the stored backup config.""" + manager = hass.data[DATA_MANAGER] + connection.send_result( + msg["id"], + { + "config": manager.config.data.to_dict(), + }, + ) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "backup/config/update", + vol.Optional("create_backup"): vol.Schema( + { + vol.Optional("agent_ids"): vol.All(list[str]), + vol.Optional("include_addons"): vol.Any(list[str], None), + vol.Optional("include_all_addons"): bool, + vol.Optional("include_database"): bool, + vol.Optional("include_folders"): vol.Any([vol.Coerce(Folder)], None), + vol.Optional("name"): vol.Any(str, None), + vol.Optional("password"): vol.Any(str, None), + }, + ), + vol.Optional("retention"): vol.Schema( + { + vol.Optional("copies"): vol.Any(int, None), + vol.Optional("days"): vol.Any(int, None), + }, + ), + vol.Optional("schedule"): vol.All(str, vol.Coerce(ScheduleState)), + } +) +@websocket_api.async_response +async def handle_config_update( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Update the stored backup config.""" + manager = hass.data[DATA_MANAGER] + changes = dict(msg) + changes.pop("id") + changes.pop("type") + await manager.config.update(**changes) + connection.send_result(msg["id"]) + + +@websocket_api.require_admin +@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"}) 
+@websocket_api.async_response +async def handle_subscribe_events( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Subscribe to backup events.""" + + def on_event(event: ManagerStateEvent) -> None: + connection.send_message(websocket_api.event_message(msg["id"], event)) + + manager = hass.data[DATA_MANAGER] + on_event(manager.last_event) + connection.subscriptions[msg["id"]] = manager.async_subscribe_events(on_event) + connection.send_result(msg["id"]) diff --git a/homeassistant/components/cloud/backup.py b/homeassistant/components/cloud/backup.py new file mode 100644 index 00000000000..58ecc7a78fd --- /dev/null +++ b/homeassistant/components/cloud/backup.py @@ -0,0 +1,196 @@ +"""Backup platform for the cloud integration.""" + +from __future__ import annotations + +import base64 +from collections.abc import AsyncIterator, Callable, Coroutine +import hashlib +from typing import Any, Self + +from aiohttp import ClientError, StreamReader +from hass_nabucasa import Cloud, CloudError +from hass_nabucasa.cloud_api import ( + async_files_delete_file, + async_files_download_details, + async_files_list, + async_files_upload_details, +) + +from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError +from homeassistant.core import HomeAssistant, callback + +from .client import CloudClient +from .const import DATA_CLOUD, DOMAIN + +_STORAGE_BACKUP = "backup" + + +async def _b64md5(stream: AsyncIterator[bytes]) -> str: + """Calculate the MD5 hash of a file.""" + file_hash = hashlib.md5() + async for chunk in stream: + file_hash.update(chunk) + return base64.b64encode(file_hash.digest()).decode() + + +async def async_get_backup_agents( + hass: HomeAssistant, + **kwargs: Any, +) -> list[BackupAgent]: + """Return the cloud backup agent.""" + return [CloudBackupAgent(hass=hass, cloud=hass.data[DATA_CLOUD])] + + +class ChunkAsyncStreamIterator: + """Async iterator for chunked streams. 
+ + Based on aiohttp.streams.ChunkTupleAsyncStreamIterator, but yields + bytes instead of tuple[bytes, bool]. + """ + + __slots__ = ("_stream",) + + def __init__(self, stream: StreamReader) -> None: + """Initialize.""" + self._stream = stream + + def __aiter__(self) -> Self: + """Iterate.""" + return self + + async def __anext__(self) -> bytes: + """Yield next chunk.""" + rv = await self._stream.readchunk() + if rv == (b"", False): + raise StopAsyncIteration + return rv[0] + + +class CloudBackupAgent(BackupAgent): + """Cloud backup agent.""" + + name = DOMAIN + + def __init__(self, hass: HomeAssistant, cloud: Cloud[CloudClient]) -> None: + """Initialize the cloud backup sync agent.""" + super().__init__() + self._cloud = cloud + self._hass = hass + + @callback + def _get_backup_filename(self) -> str: + """Return the backup filename.""" + return f"{self._cloud.client.prefs.instance_id}.tar" + + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file. + + :param backup_id: The ID of the backup that was returned in async_list_backups. + :return: An async iterator that yields bytes. + """ + if not await self.async_get_backup(backup_id): + raise BackupAgentError("Backup not found") + + try: + details = await async_files_download_details( + self._cloud, + storage_type=_STORAGE_BACKUP, + filename=self._get_backup_filename(), + ) + except (ClientError, CloudError) as err: + raise BackupAgentError("Failed to get download details") from err + + try: + resp = await self._cloud.websession.get(details["url"]) + resp.raise_for_status() + except ClientError as err: + raise BackupAgentError("Failed to download backup") from err + + return ChunkAsyncStreamIterator(resp.content) + + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup. 
+ + :param open_stream: A function returning an async iterator that yields bytes. + :param backup: Metadata about the backup that should be uploaded. + """ + if not backup.protected: + raise BackupAgentError("Cloud backups must be protected") + + base64md5hash = await _b64md5(await open_stream()) + + try: + details = await async_files_upload_details( + self._cloud, + storage_type=_STORAGE_BACKUP, + filename=self._get_backup_filename(), + metadata=backup.as_dict(), + size=backup.size, + base64md5hash=base64md5hash, + ) + except (ClientError, CloudError) as err: + raise BackupAgentError("Failed to get upload details") from err + + try: + upload_status = await self._cloud.websession.put( + details["url"], + data=await open_stream(), + headers=details["headers"] | {"content-length": str(backup.size)}, + ) + upload_status.raise_for_status() + except ClientError as err: + raise BackupAgentError("Failed to upload backup") from err + + async def async_delete_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> None: + """Delete a backup file. + + :param backup_id: The ID of the backup that was returned in async_list_backups. 
+ """ + if not await self.async_get_backup(backup_id): + raise BackupAgentError("Backup not found") + + try: + await async_files_delete_file( + self._cloud, + storage_type=_STORAGE_BACKUP, + filename=self._get_backup_filename(), + ) + except (ClientError, CloudError) as err: + raise BackupAgentError("Failed to delete backup") from err + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups.""" + try: + backups = await async_files_list(self._cloud, storage_type=_STORAGE_BACKUP) + except (ClientError, CloudError) as err: + raise BackupAgentError("Failed to list backups") from err + + return [AgentBackup.from_dict(backup["Metadata"]) for backup in backups] + + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup | None: + """Return a backup.""" + backups = await self.async_list_backups() + + for backup in backups: + if backup.backup_id == backup_id: + return backup + + return None diff --git a/homeassistant/components/cloud/manifest.json b/homeassistant/components/cloud/manifest.json index 661edb67762..48f2153e86f 100644 --- a/homeassistant/components/cloud/manifest.json +++ b/homeassistant/components/cloud/manifest.json @@ -1,7 +1,12 @@ { "domain": "cloud", "name": "Home Assistant Cloud", - "after_dependencies": ["assist_pipeline", "google_assistant", "alexa"], + "after_dependencies": [ + "alexa", + "assist_pipeline", + "backup", + "google_assistant" + ], "codeowners": ["@home-assistant/cloud"], "dependencies": ["auth", "http", "repairs", "webhook"], "documentation": "https://www.home-assistant.io/integrations/cloud", diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py new file mode 100644 index 00000000000..f7f66f6cecc --- /dev/null +++ b/homeassistant/components/hassio/backup.py @@ -0,0 +1,365 @@ +"""Backup functionality for supervised installations.""" + +from __future__ import annotations + +import asyncio +from collections.abc import 
AsyncIterator, Callable, Coroutine, Mapping +from pathlib import Path +from typing import Any, cast + +from aiohasupervisor.exceptions import SupervisorBadRequestError +from aiohasupervisor.models import ( + backups as supervisor_backups, + mounts as supervisor_mounts, +) + +from homeassistant.components.backup import ( + DATA_MANAGER, + AddonInfo, + AgentBackup, + BackupAgent, + BackupReaderWriter, + CreateBackupEvent, + Folder, + NewBackup, + WrittenBackup, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.dispatcher import async_dispatcher_connect + +from .const import DOMAIN, EVENT_SUPERVISOR_EVENT +from .handler import get_supervisor_client + +LOCATION_CLOUD_BACKUP = ".cloud_backup" + + +async def async_get_backup_agents( + hass: HomeAssistant, + **kwargs: Any, +) -> list[BackupAgent]: + """Return the hassio backup agents.""" + client = get_supervisor_client(hass) + mounts = await client.mounts.info() + agents: list[BackupAgent] = [SupervisorBackupAgent(hass, "local", None)] + for mount in mounts.mounts: + if mount.usage is not supervisor_mounts.MountUsage.BACKUP: + continue + agents.append(SupervisorBackupAgent(hass, mount.name, mount.name)) + return agents + + +def _backup_details_to_agent_backup( + details: supervisor_backups.BackupComplete, +) -> AgentBackup: + """Convert a supervisor backup details object to an agent backup.""" + homeassistant_included = details.homeassistant is not None + if not homeassistant_included: + database_included = False + else: + database_included = details.homeassistant_exclude_database is False + addons = [ + AddonInfo(name=addon.name, slug=addon.slug, version=addon.version) + for addon in details.addons + ] + return AgentBackup( + addons=addons, + backup_id=details.slug, + database_included=database_included, + date=details.date.isoformat(), + folders=[Folder(folder) for folder in details.folders], + 
homeassistant_included=homeassistant_included, + homeassistant_version=details.homeassistant, + name=details.name, + protected=details.protected, + size=details.size_bytes, + ) + + +class SupervisorBackupAgent(BackupAgent): + """Backup agent for supervised installations.""" + + def __init__(self, hass: HomeAssistant, name: str, location: str | None) -> None: + """Initialize the backup agent.""" + super().__init__() + self._hass = hass + self._backup_dir = Path("/backups") + self._client = get_supervisor_client(hass) + self.name = name + self.location = location + + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file.""" + return await self._client.backups.download_backup(backup_id) + + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup. + + Not required for supervisor, the SupervisorBackupReaderWriter stores files. 
+ """ + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups.""" + backup_list = await self._client.backups.list() + result = [] + for backup in backup_list: + if not backup.locations or self.location not in backup.locations: + continue + details = await self._client.backups.backup_info(backup.slug) + result.append(_backup_details_to_agent_backup(details)) + return result + + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup | None: + """Return a backup.""" + details = await self._client.backups.backup_info(backup_id) + if self.location not in details.locations: + return None + return _backup_details_to_agent_backup(details) + + async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None: + """Remove a backup.""" + try: + await self._client.backups.remove_backup(backup_id) + except SupervisorBadRequestError as err: + if err.args[0] != "Backup does not exist": + raise + + +class SupervisorBackupReaderWriter(BackupReaderWriter): + """Class for reading and writing backups in supervised installations.""" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize the backup reader/writer.""" + self._hass = hass + self._client = get_supervisor_client(hass) + + async def async_create_backup( + self, + *, + agent_ids: list[str], + backup_name: str, + include_addons: list[str] | None, + include_all_addons: bool, + include_database: bool, + include_folders: list[Folder] | None, + include_homeassistant: bool, + on_progress: Callable[[CreateBackupEvent], None], + password: str | None, + ) -> tuple[NewBackup, asyncio.Task[WrittenBackup]]: + """Create a backup.""" + manager = self._hass.data[DATA_MANAGER] + + include_addons_set = set(include_addons) if include_addons else None + include_folders_set = ( + {supervisor_backups.Folder(folder) for folder in include_folders} + if include_folders + else None + ) + + hassio_agents: list[SupervisorBackupAgent] = [ + 
cast(SupervisorBackupAgent, manager.backup_agents[agent_id]) + for agent_id in agent_ids + if agent_id.startswith(DOMAIN) + ] + locations = {agent.location for agent in hassio_agents} + + backup = await self._client.backups.partial_backup( + supervisor_backups.PartialBackupOptions( + addons=include_addons_set, + folders=include_folders_set, + homeassistant=include_homeassistant, + name=backup_name, + password=password, + compressed=True, + location=locations or LOCATION_CLOUD_BACKUP, + homeassistant_exclude_database=not include_database, + background=True, + ) + ) + backup_task = self._hass.async_create_task( + self._async_wait_for_backup( + backup, remove_after_upload=not bool(locations) + ), + name="backup_manager_create_backup", + eager_start=False, # To ensure the task is not started before we return + ) + + return (NewBackup(backup_job_id=backup.job_id), backup_task) + + async def _async_wait_for_backup( + self, backup: supervisor_backups.NewBackup, *, remove_after_upload: bool + ) -> WrittenBackup: + """Wait for a backup to complete.""" + backup_complete = asyncio.Event() + backup_id: str | None = None + + @callback + def on_progress(data: Mapping[str, Any]) -> None: + """Handle backup progress.""" + nonlocal backup_id + if data.get("done") is True: + backup_id = data.get("reference") + backup_complete.set() + + try: + unsub = self._async_listen_job_events(backup.job_id, on_progress) + await backup_complete.wait() + finally: + unsub() + if not backup_id: + raise HomeAssistantError("Backup failed") + + async def open_backup() -> AsyncIterator[bytes]: + return await self._client.backups.download_backup(backup_id) + + async def remove_backup() -> None: + if not remove_after_upload: + return + await self._client.backups.remove_backup(backup_id) + + details = await self._client.backups.backup_info(backup_id) + + return WrittenBackup( + backup=_backup_details_to_agent_backup(details), + open_stream=open_backup, + release_stream=remove_backup, + ) + + async def 
async_receive_backup( + self, + *, + agent_ids: list[str], + stream: AsyncIterator[bytes], + suggested_filename: str, + ) -> WrittenBackup: + """Receive a backup.""" + manager = self._hass.data[DATA_MANAGER] + + hassio_agents: list[SupervisorBackupAgent] = [ + cast(SupervisorBackupAgent, manager.backup_agents[agent_id]) + for agent_id in agent_ids + if agent_id.startswith(DOMAIN) + ] + locations = {agent.location for agent in hassio_agents} + + backup_id = await self._client.backups.upload_backup( + stream, + supervisor_backups.UploadBackupOptions( + location=locations or {LOCATION_CLOUD_BACKUP} + ), + ) + + async def open_backup() -> AsyncIterator[bytes]: + return await self._client.backups.download_backup(backup_id) + + async def remove_backup() -> None: + if locations: + return + await self._client.backups.remove_backup(backup_id) + + details = await self._client.backups.backup_info(backup_id) + + return WrittenBackup( + backup=_backup_details_to_agent_backup(details), + open_stream=open_backup, + release_stream=remove_backup, + ) + + async def async_restore_backup( + self, + backup_id: str, + *, + agent_id: str, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + password: str | None, + restore_addons: list[str] | None, + restore_database: bool, + restore_folders: list[Folder] | None, + restore_homeassistant: bool, + ) -> None: + """Restore a backup.""" + if restore_homeassistant and not restore_database: + raise HomeAssistantError("Cannot restore Home Assistant without database") + if not restore_homeassistant and restore_database: + raise HomeAssistantError("Cannot restore database without Home Assistant") + restore_addons_set = set(restore_addons) if restore_addons else None + restore_folders_set = ( + {supervisor_backups.Folder(folder) for folder in restore_folders} + if restore_folders + else None + ) + + if not agent_id.startswith(DOMAIN): + # Download the backup to the supervisor. 
Supervisor will clean up the backup + # two days after the restore is done. + await self.async_receive_backup( + agent_ids=[], + stream=await open_stream(), + suggested_filename=f"{backup_id}.tar", + ) + + job = await self._client.backups.partial_restore( + backup_id, + supervisor_backups.PartialRestoreOptions( + addons=restore_addons_set, + folders=restore_folders_set, + homeassistant=restore_homeassistant, + password=password, + background=True, + ), + ) + + restore_complete = asyncio.Event() + + @callback + def on_progress(data: Mapping[str, Any]) -> None: + """Handle backup progress.""" + if data.get("done") is True: + restore_complete.set() + + try: + unsub = self._async_listen_job_events(job.job_id, on_progress) + await restore_complete.wait() + finally: + unsub() + + @callback + def _async_listen_job_events( + self, job_id: str, on_event: Callable[[Mapping[str, Any]], None] + ) -> Callable[[], None]: + """Listen for job events.""" + + @callback + def unsub() -> None: + """Unsubscribe from job events.""" + unsub_signal() + + @callback + def handle_signal(data: Mapping[str, Any]) -> None: + """Handle a job signal.""" + if ( + data.get("event") != "job" + or not (event_data := data.get("data")) + or event_data.get("uuid") != job_id + ): + return + on_event(event_data) + + unsub_signal = async_dispatcher_connect( + self._hass, EVENT_SUPERVISOR_EVENT, handle_signal + ) + return unsub diff --git a/homeassistant/components/hassio/manifest.json b/homeassistant/components/hassio/manifest.json index 31fa27a92c4..8fe124e763c 100644 --- a/homeassistant/components/hassio/manifest.json +++ b/homeassistant/components/hassio/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/hassio", "iot_class": "local_polling", "quality_scale": "internal", - "requirements": ["aiohasupervisor==0.2.1"], + "requirements": ["aiohasupervisor==0.2.2b0"], "single_config_entry": true } diff --git a/homeassistant/components/kitchen_sink/backup.py 
b/homeassistant/components/kitchen_sink/backup.py new file mode 100644 index 00000000000..02c61ff4de6 --- /dev/null +++ b/homeassistant/components/kitchen_sink/backup.py @@ -0,0 +1,92 @@ +"""Backup platform for the kitchen_sink integration.""" + +from __future__ import annotations + +import asyncio +from collections.abc import AsyncIterator, Callable, Coroutine +import logging +from typing import Any + +from homeassistant.components.backup import AddonInfo, AgentBackup, BackupAgent, Folder +from homeassistant.core import HomeAssistant + +LOGGER = logging.getLogger(__name__) + + +async def async_get_backup_agents( + hass: HomeAssistant, +) -> list[BackupAgent]: + """Register the backup agents.""" + return [KitchenSinkBackupAgent("syncer")] + + +class KitchenSinkBackupAgent(BackupAgent): + """Kitchen sink backup agent.""" + + def __init__(self, name: str) -> None: + """Initialize the kitchen sink backup sync agent.""" + super().__init__() + self.name = name + self._uploads = [ + AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id="abc123", + database_included=False, + date="1970-01-01T00:00:00Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Kitchen sink syncer", + protected=False, + size=1234, + ) + ] + + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file.""" + LOGGER.info("Downloading backup %s", backup_id) + reader = asyncio.StreamReader() + reader.feed_data(b"backup data") + reader.feed_eof() + return reader + + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup.""" + LOGGER.info("Uploading backup %s %s", backup.backup_id, backup) + self._uploads.append(backup) + + async def async_delete_backup( + self, + backup_id: str, + **kwargs: Any, + 
) -> None: + """Delete a backup file.""" + self._uploads = [ + upload for upload in self._uploads if upload.backup_id != backup_id + ] + LOGGER.info("Deleted backup %s", backup_id) + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List synced backups.""" + return self._uploads + + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup | None: + """Return a backup.""" + for backup in self._uploads: + if backup.backup_id == backup_id: + return backup + return None diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 726dad56ccb..e4abf3ab678 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -3,7 +3,7 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 -aiohasupervisor==0.2.1 +aiohasupervisor==0.2.2b0 aiohttp-fast-zlib==0.2.0 aiohttp==3.11.10 aiohttp_cors==0.7.0 diff --git a/pyproject.toml b/pyproject.toml index 5239874e2f6..c40f8bd0d01 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ dependencies = [ # Integrations may depend on hassio integration without listing it to # change behavior based on presence of supervisor. 
Deprecated with #127228 # Lib can be removed with 2025.11 - "aiohasupervisor==0.2.1", + "aiohasupervisor==0.2.2b0", "aiohttp==3.11.10", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.2.0", diff --git a/requirements.txt b/requirements.txt index 7ed445c6b65..9ef9f0e44f2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # Home Assistant Core aiodns==3.2.0 -aiohasupervisor==0.2.1 +aiohasupervisor==0.2.2b0 aiohttp==3.11.10 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index c6ab1e2dfae..661ce5876a9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -262,7 +262,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.1 +aiohasupervisor==0.2.2b0 # homeassistant.components.homekit_controller aiohomekit==3.2.7 @@ -704,6 +704,7 @@ connect-box==0.3.1 # homeassistant.components.xiaomi_miio construct==2.10.68 +# homeassistant.components.backup # homeassistant.components.utility_meter cronsim==2.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f9ed2bebf99..c959d83723c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -247,7 +247,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.1 +aiohasupervisor==0.2.2b0 # homeassistant.components.homekit_controller aiohomekit==3.2.7 @@ -600,6 +600,7 @@ colorthief==0.2.1 # homeassistant.components.xiaomi_miio construct==2.10.68 +# homeassistant.components.backup # homeassistant.components.utility_meter cronsim==2.6 diff --git a/tests/components/backup/common.py b/tests/components/backup/common.py index 70b33d2de3f..133a2602192 100644 --- a/tests/components/backup/common.py +++ b/tests/components/backup/common.py @@ -2,29 +2,162 @@ from __future__ import annotations +from collections.abc import AsyncIterator, Callable, Coroutine from pathlib import Path -from unittest.mock import patch +from typing import Any +from 
unittest.mock import AsyncMock, Mock, patch -from homeassistant.components.backup import DOMAIN -from homeassistant.components.backup.manager import Backup +from homeassistant.components.backup import ( + DOMAIN, + AddonInfo, + AgentBackup, + BackupAgent, + BackupAgentPlatformProtocol, + Folder, +) +from homeassistant.components.backup.const import DATA_MANAGER from homeassistant.core import HomeAssistant from homeassistant.helpers.typing import ConfigType from homeassistant.setup import async_setup_component -TEST_BACKUP = Backup( - slug="abc123", - name="Test", +from tests.common import MockPlatform, mock_platform + +LOCAL_AGENT_ID = f"{DOMAIN}.local" + +TEST_BACKUP_ABC123 = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id="abc123", + database_included=True, date="1970-01-01T00:00:00.000Z", - path=Path("abc123.tar"), - size=0.0, + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=False, + size=0, ) +TEST_BACKUP_PATH_ABC123 = Path("abc123.tar") + +TEST_BACKUP_DEF456 = AgentBackup( + addons=[], + backup_id="def456", + database_included=False, + date="1980-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test 2", + protected=False, + size=1, +) + +TEST_DOMAIN = "test" + + +class BackupAgentTest(BackupAgent): + """Test backup agent.""" + + def __init__(self, name: str, backups: list[AgentBackup] | None = None) -> None: + """Initialize the backup agent.""" + self.name = name + if backups is None: + backups = [ + AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id="abc123", + database_included=True, + date="1970-01-01T00:00:00Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=False, + size=13, + ) + ] + + self._backup_data: bytearray | 
None = None + self._backups = {backup.backup_id: backup for backup in backups} + + async def async_download_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AsyncIterator[bytes]: + """Download a backup file.""" + return AsyncMock(spec_set=["__aiter__"]) + + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup.""" + self._backups[backup.backup_id] = backup + backup_stream = await open_stream() + self._backup_data = bytearray() + async for chunk in backup_stream: + self._backup_data += chunk + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups.""" + return list(self._backups.values()) + + async def async_get_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> AgentBackup | None: + """Return a backup.""" + return self._backups.get(backup_id) + + async def async_delete_backup( + self, + backup_id: str, + **kwargs: Any, + ) -> None: + """Delete a backup file.""" async def setup_backup_integration( hass: HomeAssistant, with_hassio: bool = False, configuration: ConfigType | None = None, + *, + backups: dict[str, list[AgentBackup]] | None = None, + remote_agents: list[str] | None = None, ) -> bool: """Set up the Backup integration.""" - with patch("homeassistant.components.backup.is_hassio", return_value=with_hassio): - return await async_setup_component(hass, DOMAIN, configuration or {}) + with ( + patch("homeassistant.components.backup.is_hassio", return_value=with_hassio), + patch( + "homeassistant.components.backup.backup.is_hassio", return_value=with_hassio + ), + ): + remote_agents = remote_agents or [] + platform = Mock( + async_get_backup_agents=AsyncMock( + return_value=[BackupAgentTest(agent, []) for agent in remote_agents] + ), + spec_set=BackupAgentPlatformProtocol, + ) + + mock_platform(hass, f"{TEST_DOMAIN}.backup", platform or MockPlatform()) + assert await 
async_setup_component(hass, TEST_DOMAIN, {}) + + result = await async_setup_component(hass, DOMAIN, configuration or {}) + await hass.async_block_till_done() + if not backups: + return result + + for agent_id, agent_backups in backups.items(): + if with_hassio and agent_id == LOCAL_AGENT_ID: + continue + agent = hass.data[DATA_MANAGER].backup_agents[agent_id] + agent._backups = {backups.backup_id: backups for backups in agent_backups} + if agent_id == LOCAL_AGENT_ID: + agent._loaded_backups = True + + return result diff --git a/tests/components/backup/conftest.py b/tests/components/backup/conftest.py new file mode 100644 index 00000000000..7ccfcc4e0f0 --- /dev/null +++ b/tests/components/backup/conftest.py @@ -0,0 +1,97 @@ +"""Test fixtures for the Backup integration.""" + +from __future__ import annotations + +from collections.abc import Generator +from pathlib import Path +from unittest.mock import MagicMock, Mock, patch + +import pytest + +from homeassistant.core import HomeAssistant + +from .common import TEST_BACKUP_PATH_ABC123 + + +@pytest.fixture(name="mocked_json_bytes") +def mocked_json_bytes_fixture() -> Generator[Mock]: + """Mock json_bytes.""" + with patch( + "homeassistant.components.backup.manager.json_bytes", + return_value=b"{}", # Empty JSON + ) as mocked_json_bytes: + yield mocked_json_bytes + + +@pytest.fixture(name="mocked_tarfile") +def mocked_tarfile_fixture() -> Generator[Mock]: + """Mock tarfile.""" + with patch( + "homeassistant.components.backup.manager.SecureTarFile" + ) as mocked_tarfile: + yield mocked_tarfile + + +@pytest.fixture(name="path_glob") +def path_glob_fixture() -> Generator[MagicMock]: + """Mock path glob.""" + with patch( + "pathlib.Path.glob", return_value=[TEST_BACKUP_PATH_ABC123] + ) as path_glob: + yield path_glob + + +CONFIG_DIR = { + "testing_config": [ + Path("test.txt"), + Path(".DS_Store"), + Path(".storage"), + Path("backups"), + Path("tmp_backups"), + Path("home-assistant_v2.db"), + ], + "backups": [ + 
Path("backups/backup.tar"), + Path("backups/not_backup"), + ], + "tmp_backups": [ + Path("tmp_backups/forgotten_backup.tar"), + Path("tmp_backups/not_backup"), + ], +} +CONFIG_DIR_DIRS = {Path(".storage"), Path("backups"), Path("tmp_backups")} + + +@pytest.fixture(name="mock_backup_generation") +def mock_backup_generation_fixture( + hass: HomeAssistant, mocked_json_bytes: Mock, mocked_tarfile: Mock +) -> Generator[None]: + """Mock backup generator.""" + + with ( + patch("pathlib.Path.iterdir", lambda x: CONFIG_DIR.get(x.name, [])), + patch("pathlib.Path.stat", return_value=MagicMock(st_size=123)), + patch("pathlib.Path.is_file", lambda x: x not in CONFIG_DIR_DIRS), + patch("pathlib.Path.is_dir", lambda x: x in CONFIG_DIR_DIRS), + patch( + "pathlib.Path.exists", + lambda x: x + not in ( + Path(hass.config.path("backups")), + Path(hass.config.path("tmp_backups")), + ), + ), + patch( + "pathlib.Path.is_symlink", + lambda _: False, + ), + patch( + "pathlib.Path.mkdir", + MagicMock(), + ), + patch( + "homeassistant.components.backup.manager.HAVERSION", + "2025.1.0", + ), + ): + yield diff --git a/tests/components/backup/snapshots/test_backup.ambr b/tests/components/backup/snapshots/test_backup.ambr new file mode 100644 index 00000000000..b350ff680ee --- /dev/null +++ b/tests/components/backup/snapshots/test_backup.ambr @@ -0,0 +1,206 @@ +# serializer version: 1 +# name: test_delete_backup[found_backups0-True-1] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_backup[found_backups1-False-0] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_backup[found_backups2-True-0] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[None] + dict({ + 'id': 1, + 'result': dict({ + 'agents': 
list([ + dict({ + 'agent_id': 'backup.local', + }), + ]), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[None].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect1] + dict({ + 'id': 1, + 'result': dict({ + 'agents': list([ + dict({ + 'agent_id': 'backup.local', + }), + ]), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect1].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect2] + dict({ + 'id': 1, + 'result': dict({ + 'agents': list([ + dict({ + 'agent_id': 'backup.local', + }), + ]), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect2].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect3] + dict({ + 'id': 1, + 'result': dict({ + 'agents': list([ + dict({ + 'agent_id': 'backup.local', + 
}), + ]), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect3].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect4] + dict({ + 'id': 1, + 'result': dict({ + 'agents': list([ + dict({ + 'agent_id': 'backup.local', + }), + ]), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_load_backups[side_effect4].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index 096df37d704..8bd4e2817b2 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -1,4 +1,32 @@ # serializer version: 1 +# name: test_agent_delete_backup + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_agents_info + dict({ + 'id': 1, + 'result': dict({ + 'agents': list([ + dict({ + 'agent_id': 'backup.local', + }), + dict({ + 'agent_id': 'domain.test', + }), + ]), + }), + 'success': True, + 'type': 'result', + }) +# --- # name: test_backup_end[with_hassio-hass_access_token] dict({ 'error': dict({ @@ -40,7 +68,7 @@ 'type': 'result', }) # --- -# name: test_backup_end_excepion[exception0] +# name: test_backup_end_exception[exception0] dict({ 'error': dict({ 'code': 'post_backup_actions_failed', @@ -51,7 +79,7 @@ 'type': 'result', }) # --- -# name: test_backup_end_excepion[exception1] +# name: test_backup_end_exception[exception1] dict({ 'error': dict({ 'code': 
'post_backup_actions_failed', @@ -62,7 +90,7 @@ 'type': 'result', }) # --- -# name: test_backup_end_excepion[exception2] +# name: test_backup_end_exception[exception2] dict({ 'error': dict({ 'code': 'post_backup_actions_failed', @@ -114,7 +142,7 @@ 'type': 'result', }) # --- -# name: test_backup_start_excepion[exception0] +# name: test_backup_start_exception[exception0] dict({ 'error': dict({ 'code': 'pre_backup_actions_failed', @@ -125,7 +153,7 @@ 'type': 'result', }) # --- -# name: test_backup_start_excepion[exception1] +# name: test_backup_start_exception[exception1] dict({ 'error': dict({ 'code': 'pre_backup_actions_failed', @@ -136,7 +164,7 @@ 'type': 'result', }) # --- -# name: test_backup_start_excepion[exception2] +# name: test_backup_start_exception[exception2] dict({ 'error': dict({ 'code': 'pre_backup_actions_failed', @@ -147,121 +175,2666 @@ 'type': 'result', }) # --- -# name: test_details[with_hassio-with_backup_content] - dict({ - 'error': dict({ - 'code': 'unknown_command', - 'message': 'Unknown command.', - }), - 'id': 1, - 'success': False, - 'type': 'result', - }) -# --- -# name: test_details[with_hassio-without_backup_content] - dict({ - 'error': dict({ - 'code': 'unknown_command', - 'message': 'Unknown command.', - }), - 'id': 1, - 'success': False, - 'type': 'result', - }) -# --- -# name: test_details[without_hassio-with_backup_content] +# name: test_config_info[None] dict({ 'id': 1, 'result': dict({ - 'backup': dict({ - 'date': '1970-01-01T00:00:00.000Z', - 'name': 'Test', - 'path': 'abc123.tar', - 'size': 0.0, - 'slug': 'abc123', + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), 
}), }), 'success': True, 'type': 'result', }) # --- -# name: test_details[without_hassio-without_backup_content] +# name: test_config_info[storage_data1] dict({ 'id': 1, 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': list([ + 'test-addon', + ]), + 'include_all_addons': True, + 'include_database': True, + 'include_folders': list([ + 'media', + ]), + 'name': 'test-name', + 'password': 'test-password', + }), + 'last_attempted_strategy_backup': '2024-10-26T04:45:00+01:00', + 'last_completed_strategy_backup': '2024-10-26T04:45:00+01:00', + 'retention': dict({ + 'copies': 3, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_info[storage_data2] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': False, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_info[storage_data3] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': False, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': '2024-10-27T04:45:00+01:00', + 'last_completed_strategy_backup': '2024-10-26T04:45:00+01:00', + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: 
test_config_info[storage_data4] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': False, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'mon', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_info[storage_data5] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': False, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'sat', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command0] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command0].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': 
None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command0].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command10] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command10].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 
'type': 'result', + }) +# --- +# name: test_config_update[command10].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command1] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command1].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command1].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 
'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command2] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command2].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'mon', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command2].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 
'days': None, + }), + 'schedule': dict({ + 'state': 'mon', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command3] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command3].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command3].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command4] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 
'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command4].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': list([ + 'test-addon', + ]), + 'include_all_addons': False, + 'include_database': True, + 'include_folders': list([ + 'media', + ]), + 'name': 'test-name', + 'password': 'test-password', + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command4].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': list([ + 'test-addon', + ]), + 'include_all_addons': False, + 'include_database': True, + 'include_folders': list([ + 'media', + ]), + 'name': 'test-name', + 'password': 'test-password', + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command5] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': 
True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command5].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command5].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command6] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + 
}), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command6].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command6].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command7] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command7].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 
'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command7].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command8] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command8].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ 
+ 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command8].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': 7, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update[command9] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command9].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update[command9].2 + dict({ + 'data': dict({ + 'backups': list([ + ]), + 'config': dict({ 
+ 'create_backup': dict({ + 'agent_ids': list([ + 'test-agent', + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': 3, + 'days': None, + }), + 'schedule': dict({ + 'state': 'daily', + }), + }), + }), + 'key': 'backup', + 'minor_version': 1, + 'version': 1, + }) +# --- +# name: test_config_update_errors[command0] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update_errors[command0].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents0-backups0] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents0-backups0].1 + dict({ + 'id': 2, + 'result': 
dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents0-backups0].2 + dict({ + 'id': 3, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents1-backups1] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents1-backups1].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents1-backups1].2 + dict({ + 'id': 3, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents2-backups2] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', 
+ 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents2-backups2].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents2-backups2].2 + dict({ + 'id': 3, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents3-backups3] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'def456', + 'database_included': False, + 'date': '1980-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test 2', + 'protected': False, + 'size': 1, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': 
None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents3-backups3].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents3-backups3].2 + dict({ + 'id': 3, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'def456', + 'database_included': False, + 'date': '1980-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test 2', + 'protected': False, + 'size': 1, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents4-backups4] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents4-backups4].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete[remote_agents4-backups4].2 + dict({ + 'id': 3, + 'result': 
dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[BackupAgentUnreachableError-storage_data0] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + 'domain.test': 'The backup agent is unreachable.', + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[BackupAgentUnreachableError-storage_data0].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'domain.test', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 13, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[BackupAgentUnreachableError-storage_data1] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + 'domain.test': 'The backup agent is unreachable.', + }), + }), + 'success': True, + 'type': 
'result', + }) +# --- +# name: test_delete_with_errors[BackupAgentUnreachableError-storage_data1].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'domain.test', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00Z', + 'failed_agent_ids': list([ + 'test.remote', + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 13, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[None-storage_data0] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[None-storage_data0].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'domain.test', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 13, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[None-storage_data1] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: 
test_delete_with_errors[None-storage_data1].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'domain.test', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 13, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[side_effect1-storage_data0] + dict({ + 'error': dict({ + 'code': 'home_assistant_error', + 'message': 'Boom!', + }), + 'id': 1, + 'success': False, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[side_effect1-storage_data0].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'domain.test', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 13, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_delete_with_errors[side_effect1-storage_data1] + dict({ + 'error': dict({ + 'code': 'home_assistant_error', + 'message': 'Boom!', + }), + 'id': 1, + 'success': False, + 'type': 'result', + }) +# --- +# 
name: test_delete_with_errors[side_effect1-storage_data1].1 + dict({ + 'id': 2, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'domain.test', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00Z', + 'failed_agent_ids': list([ + 'test.remote', + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 13, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_details[remote_agents0-backups0] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), 'backup': None, }), 'success': True, 'type': 'result', }) # --- -# name: test_generate[with_hassio] - dict({ - 'error': dict({ - 'code': 'unknown_command', - 'message': 'Unknown command.', - }), - 'id': 1, - 'success': False, - 'type': 'result', - }) -# --- -# name: test_generate[without_hassio] +# name: test_details[remote_agents1-backups1] dict({ 'id': 1, 'result': dict({ - 'date': '1970-01-01T00:00:00.000Z', - 'name': 'Test', - 'path': 'abc123.tar', - 'size': 0.0, - 'slug': 'abc123', + 'agent_errors': dict({ + }), + 'backup': dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), }), 'success': True, 'type': 'result', }) # --- 
-# name: test_info[with_hassio] +# name: test_details[remote_agents2-backups2] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backup': dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_details[remote_agents3-backups3] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_details[remote_agents4-backups4] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backup': dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_details_with_errors[BackupAgentUnreachableError] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + 'domain.test': 'The backup agent is unreachable.', + }), + 'backup': dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': 
True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_details_with_errors[side_effect0] dict({ 'error': dict({ - 'code': 'unknown_command', - 'message': 'Unknown command.', + 'code': 'home_assistant_error', + 'message': 'Boom!', }), 'id': 1, 'success': False, 'type': 'result', }) # --- -# name: test_info[without_hassio] +# name: test_generate[None] + dict({ + 'event': dict({ + 'manager_state': 'idle', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[None].1 + dict({ + 'id': 1, + 'result': None, + 'success': True, + 'type': 'result', + }) +# --- +# name: test_generate[None].2 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[None].3 + dict({ + 'id': 2, + 'result': dict({ + 'backup_job_id': '27f5c632', + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_generate[None].4 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': 'home_assistant', + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[None].5 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': 'upload_to_agents', + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[None].6 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'completed', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data1] + dict({ + 'event': dict({ + 'manager_state': 'idle', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data1].1 + dict({ + 'id': 1, + 
'result': None, + 'success': True, + 'type': 'result', + }) +# --- +# name: test_generate[data1].2 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data1].3 + dict({ + 'id': 2, + 'result': dict({ + 'backup_job_id': '27f5c632', + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_generate[data1].4 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': 'home_assistant', + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data1].5 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': 'upload_to_agents', + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data1].6 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'completed', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data2] + dict({ + 'event': dict({ + 'manager_state': 'idle', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data2].1 + dict({ + 'id': 1, + 'result': None, + 'success': True, + 'type': 'result', + }) +# --- +# name: test_generate[data2].2 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data2].3 + dict({ + 'id': 2, + 'result': dict({ + 'backup_job_id': '27f5c632', + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_generate[data2].4 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': 'home_assistant', + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_generate[data2].5 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': 'upload_to_agents', + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: 
test_generate[data2].6 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'completed', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_info[remote_agents0-remote_backups0] dict({ 'id': 1, 'result': dict({ - 'backing_up': False, + 'agent_errors': dict({ + }), 'backups': list([ dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', 'name': 'Test', - 'path': 'abc123.tar', - 'size': 0.0, - 'slug': 'abc123', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, }), ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, }), 'success': True, 'type': 'result', }) # --- -# name: test_remove[with_hassio] +# name: test_info[remote_agents1-remote_backups1] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_info[remote_agents2-remote_backups2] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': 
list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'test.remote', + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_info[remote_agents3-remote_backups3] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backups': list([ + dict({ + 'addons': list([ + ]), + 'agent_ids': list([ + 'test.remote', + ]), + 'backup_id': 'def456', + 'database_included': False, + 'date': '1980-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test 2', + 'protected': False, + 'size': 1, + 'with_strategy_settings': False, + }), + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_info_with_errors[BackupAgentUnreachableError] + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + 'domain.test': 'The 
backup agent is unreachable.', + }), + 'backups': list([ + dict({ + 'addons': list([ + dict({ + 'name': 'Test', + 'slug': 'test', + 'version': '1.0.0', + }), + ]), + 'agent_ids': list([ + 'backup.local', + ]), + 'backup_id': 'abc123', + 'database_included': True, + 'date': '1970-01-01T00:00:00.000Z', + 'failed_agent_ids': list([ + ]), + 'folders': list([ + 'media', + 'share', + ]), + 'homeassistant_included': True, + 'homeassistant_version': '2024.12.0', + 'name': 'Test', + 'protected': False, + 'size': 0, + 'with_strategy_settings': False, + }), + ]), + 'last_attempted_strategy_backup': None, + 'last_completed_strategy_backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_info_with_errors[side_effect0] dict({ 'error': dict({ - 'code': 'unknown_command', - 'message': 'Unknown command.', + 'code': 'home_assistant_error', + 'message': 'Boom!', }), 'id': 1, 'success': False, 'type': 'result', }) # --- -# name: test_remove[without_hassio] +# name: test_restore_local_agent[backups0] + dict({ + 'error': dict({ + 'code': 'home_assistant_error', + 'message': 'Backup abc123 not found in agent backup.local', + }), + 'id': 1, + 'success': False, + 'type': 'result', + }) +# --- +# name: test_restore_local_agent[backups0].1 + 0 +# --- +# name: test_restore_local_agent[backups1] dict({ 'id': 1, 'result': None, @@ -269,18 +2842,24 @@ 'type': 'result', }) # --- -# name: test_restore[with_hassio] +# name: test_restore_local_agent[backups1].1 + 1 +# --- +# name: test_restore_remote_agent[remote_agents0-backups0] dict({ 'error': dict({ - 'code': 'unknown_command', - 'message': 'Unknown command.', + 'code': 'home_assistant_error', + 'message': 'Backup abc123 not found in agent test.remote', }), 'id': 1, 'success': False, 'type': 'result', }) # --- -# name: test_restore[without_hassio] +# name: test_restore_remote_agent[remote_agents0-backups0].1 + 0 +# --- +# name: test_restore_remote_agent[remote_agents1-backups1] dict({ 'id': 1, 'result': None, @@ 
-288,3 +2867,34 @@ 'type': 'result', }) # --- +# name: test_restore_remote_agent[remote_agents1-backups1].1 + 1 +# --- +# name: test_subscribe_event + dict({ + 'event': dict({ + 'manager_state': 'idle', + }), + 'id': 1, + 'type': 'event', + }) +# --- +# name: test_subscribe_event.1 + dict({ + 'id': 1, + 'result': None, + 'success': True, + 'type': 'result', + }) +# --- +# name: test_subscribe_event.2 + dict({ + 'event': dict({ + 'manager_state': 'create_backup', + 'stage': None, + 'state': 'in_progress', + }), + 'id': 1, + 'type': 'event', + }) +# --- diff --git a/tests/components/backup/test_backup.py b/tests/components/backup/test_backup.py new file mode 100644 index 00000000000..02252ef6fa5 --- /dev/null +++ b/tests/components/backup/test_backup.py @@ -0,0 +1,129 @@ +"""Test the builtin backup platform.""" + +from __future__ import annotations + +from collections.abc import Generator +from io import StringIO +import json +from pathlib import Path +from tarfile import TarError +from unittest.mock import MagicMock, mock_open, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.backup import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from .common import TEST_BACKUP_ABC123, TEST_BACKUP_PATH_ABC123 + +from tests.typing import ClientSessionGenerator, WebSocketGenerator + + +@pytest.fixture(name="read_backup") +def read_backup_fixture(path_glob: MagicMock) -> Generator[MagicMock]: + """Mock read backup.""" + with patch( + "homeassistant.components.backup.backup.read_backup", + return_value=TEST_BACKUP_ABC123, + ) as read_backup: + yield read_backup + + +@pytest.mark.parametrize( + "side_effect", + [ + None, + OSError("Boom"), + TarError("Boom"), + json.JSONDecodeError("Boom", "test", 1), + KeyError("Boom"), + ], +) +async def test_load_backups( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + read_backup: MagicMock, 
+ side_effect: Exception | None, +) -> None: + """Test load backups.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + client = await hass_ws_client(hass) + read_backup.side_effect = side_effect + + # list agents + await client.send_json_auto_id({"type": "backup/agents/info"}) + assert await client.receive_json() == snapshot + + # load and list backups + await client.send_json_auto_id({"type": "backup/info"}) + assert await client.receive_json() == snapshot + + +async def test_upload( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test upload backup.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + client = await hass_client() + open_mock = mock_open() + + with ( + patch("pathlib.Path.open", open_mock), + patch("shutil.move") as move_mock, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=TEST_BACKUP_ABC123, + ), + ): + resp = await client.post( + "/api/backup/upload?agent_id=backup.local", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + assert open_mock.call_count == 1 + assert move_mock.call_count == 1 + assert move_mock.mock_calls[0].args[1].name == "abc123.tar" + + +@pytest.mark.usefixtures("read_backup") +@pytest.mark.parametrize( + ("found_backups", "backup_exists", "unlink_calls"), + [ + ([TEST_BACKUP_PATH_ABC123], True, 1), + ([TEST_BACKUP_PATH_ABC123], False, 0), + (([], True, 0)), + ], +) +async def test_delete_backup( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + path_glob: MagicMock, + found_backups: list[Path], + backup_exists: bool, + unlink_calls: int, +) -> None: + """Test delete backup.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + client = await hass_ws_client(hass) + path_glob.return_value = found_backups + + with ( + 
patch("pathlib.Path.exists", return_value=backup_exists), + patch("pathlib.Path.unlink") as unlink, + ): + await client.send_json_auto_id( + {"type": "backup/delete", "backup_id": TEST_BACKUP_ABC123.backup_id} + ) + assert await client.receive_json() == snapshot + + assert unlink.call_count == unlink_calls diff --git a/tests/components/backup/test_http.py b/tests/components/backup/test_http.py index 76b1f76b55b..c071a0d8386 100644 --- a/tests/components/backup/test_http.py +++ b/tests/components/backup/test_http.py @@ -7,27 +7,28 @@ from unittest.mock import patch from aiohttp import web import pytest +from homeassistant.components.backup.const import DATA_MANAGER from homeassistant.core import HomeAssistant -from .common import TEST_BACKUP, setup_backup_integration +from .common import TEST_BACKUP_ABC123, BackupAgentTest, setup_backup_integration from tests.common import MockUser from tests.typing import ClientSessionGenerator -async def test_downloading_backup( +async def test_downloading_local_backup( hass: HomeAssistant, hass_client: ClientSessionGenerator, ) -> None: - """Test downloading a backup file.""" + """Test downloading a local backup file.""" await setup_backup_integration(hass) client = await hass_client() with ( patch( - "homeassistant.components.backup.manager.BackupManager.async_get_backup", - return_value=TEST_BACKUP, + "homeassistant.components.backup.backup.CoreLocalBackupAgent.async_get_backup", + return_value=TEST_BACKUP_ABC123, ), patch("pathlib.Path.exists", return_value=True), patch( @@ -35,10 +36,29 @@ async def test_downloading_backup( return_value=web.Response(text=""), ), ): - resp = await client.get("/api/backup/download/abc123") + resp = await client.get("/api/backup/download/abc123?agent_id=backup.local") assert resp.status == 200 +async def test_downloading_remote_backup( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test downloading a remote backup.""" + await setup_backup_integration(hass) + 
hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest("test") + + client = await hass_client() + + with ( + patch.object(BackupAgentTest, "async_download_backup") as download_mock, + ): + download_mock.return_value.__aiter__.return_value = iter((b"backup data",)) + resp = await client.get("/api/backup/download/abc123?agent_id=domain.test") + assert resp.status == 200 + assert await resp.content.read() == b"backup data" + + async def test_downloading_backup_not_found( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -48,7 +68,7 @@ async def test_downloading_backup_not_found( client = await hass_client() - resp = await client.get("/api/backup/download/abc123") + resp = await client.get("/api/backup/download/abc123?agent_id=backup.local") assert resp.status == 404 @@ -63,7 +83,7 @@ async def test_downloading_as_non_admin( client = await hass_client() - resp = await client.get("/api/backup/download/abc123") + resp = await client.get("/api/backup/download/abc123?agent_id=backup.local") assert resp.status == 401 @@ -80,7 +100,7 @@ async def test_uploading_a_backup_file( "homeassistant.components.backup.manager.BackupManager.async_receive_backup", ) as async_receive_backup_mock: resp = await client.post( - "/api/backup/upload", + "/api/backup/upload?agent_id=backup.local", data={"file": StringIO("test")}, ) assert resp.status == 201 @@ -90,7 +110,7 @@ async def test_uploading_a_backup_file( @pytest.mark.parametrize( ("error", "message"), [ - (OSError("Boom!"), "Can't write backup file Boom!"), + (OSError("Boom!"), "Can't write backup file: Boom!"), (asyncio.CancelledError("Boom!"), ""), ], ) @@ -110,7 +130,7 @@ async def test_error_handling_uploading_a_backup_file( side_effect=error, ): resp = await client.post( - "/api/backup/upload", + "/api/backup/upload?agent_id=backup.local", data={"file": StringIO("test")}, ) assert resp.status == 500 diff --git a/tests/components/backup/test_init.py b/tests/components/backup/test_init.py index 
e064939d618..16a49af9647 100644 --- a/tests/components/backup/test_init.py +++ b/tests/components/backup/test_init.py @@ -1,15 +1,18 @@ """Tests for the Backup integration.""" +from typing import Any from unittest.mock import patch import pytest -from homeassistant.components.backup.const import DOMAIN +from homeassistant.components.backup.const import DATA_MANAGER, DOMAIN from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceNotFound from .common import setup_backup_integration +@pytest.mark.usefixtures("supervisor_client") async def test_setup_with_hassio( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -20,14 +23,14 @@ async def test_setup_with_hassio( with_hassio=True, configuration={DOMAIN: {}}, ) - assert ( - "The backup integration is not supported on this installation method, please" - " remove it from your configuration" - ) in caplog.text + manager = hass.data[DATA_MANAGER] + assert not manager.backup_agents +@pytest.mark.parametrize("service_data", [None, {}]) async def test_create_service( hass: HomeAssistant, + service_data: dict[str, Any] | None, ) -> None: """Test generate backup.""" await setup_backup_integration(hass) @@ -39,6 +42,15 @@ async def test_create_service( DOMAIN, "create", blocking=True, + service_data=service_data, ) assert generate_backup.called + + +async def test_create_service_with_hassio(hass: HomeAssistant) -> None: + """Test action backup.create does not exist with hassio.""" + await setup_backup_integration(hass, with_hassio=True) + + with pytest.raises(ServiceNotFound): + await hass.services.async_call(DOMAIN, "create", blocking=True) diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index a3f70267643..f335ea5c0ee 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -2,199 +2,527 @@ from __future__ import annotations -from pathlib import Path -from unittest.mock import AsyncMock, 
MagicMock, Mock, mock_open, patch +import asyncio +from collections.abc import Generator +from io import StringIO +import json +from typing import Any +from unittest.mock import ANY, AsyncMock, MagicMock, Mock, call, mock_open, patch -import aiohttp -from multidict import CIMultiDict, CIMultiDictProxy import pytest -from homeassistant.components.backup import BackupManager -from homeassistant.components.backup.manager import BackupPlatformProtocol +from homeassistant.components.backup import ( + DOMAIN, + AgentBackup, + BackupAgentPlatformProtocol, + BackupManager, + BackupPlatformProtocol, + Folder, + backup as local_backup_platform, +) +from homeassistant.components.backup.const import DATA_MANAGER +from homeassistant.components.backup.manager import ( + BackupManagerState, + CoreBackupReaderWriter, + CreateBackupEvent, + CreateBackupStage, + CreateBackupState, + NewBackup, + WrittenBackup, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component -from .common import TEST_BACKUP +from .common import ( + LOCAL_AGENT_ID, + TEST_BACKUP_ABC123, + TEST_BACKUP_DEF456, + BackupAgentTest, +) from tests.common import MockPlatform, mock_platform +from tests.typing import ClientSessionGenerator, WebSocketGenerator + +_EXPECTED_FILES = [ + "test.txt", + ".storage", + "backups", + "backups/not_backup", + "tmp_backups", + "tmp_backups/not_backup", +] +_EXPECTED_FILES_WITH_DATABASE = { + True: [*_EXPECTED_FILES, "home-assistant_v2.db"], + False: _EXPECTED_FILES, +} -async def _mock_backup_generation(manager: BackupManager): - """Mock backup generator.""" - - def _mock_iterdir(path: Path) -> list[Path]: - if not path.name.endswith("testing_config"): - return [] - return [ - Path("test.txt"), - Path(".DS_Store"), - Path(".storage"), - ] - - with ( - patch( - "homeassistant.components.backup.manager.SecureTarFile" - ) as mocked_tarfile, - patch("pathlib.Path.iterdir", 
_mock_iterdir), - patch("pathlib.Path.stat", MagicMock(st_size=123)), - patch("pathlib.Path.is_file", lambda x: x.name != ".storage"), - patch( - "pathlib.Path.is_dir", - lambda x: x.name == ".storage", - ), - patch( - "pathlib.Path.exists", - lambda x: x != manager.backup_dir, - ), - patch( - "pathlib.Path.is_symlink", - lambda _: False, - ), - patch( - "pathlib.Path.mkdir", - MagicMock(), - ), - patch( - "homeassistant.components.backup.manager.json_bytes", - return_value=b"{}", # Empty JSON - ) as mocked_json_bytes, - patch( - "homeassistant.components.backup.manager.HAVERSION", - "2025.1.0", - ), - ): - await manager.async_create_backup() - - assert mocked_json_bytes.call_count == 1 - backup_json_dict = mocked_json_bytes.call_args[0][0] - assert isinstance(backup_json_dict, dict) - assert backup_json_dict["homeassistant"] == {"version": "2025.1.0"} - assert manager.backup_dir.as_posix() in str( - mocked_tarfile.call_args_list[0][0][0] - ) - - -async def _setup_mock_domain( +async def _setup_backup_platform( hass: HomeAssistant, - platform: BackupPlatformProtocol | None = None, + *, + domain: str = "some_domain", + platform: BackupPlatformProtocol | BackupAgentPlatformProtocol | None = None, ) -> None: """Set up a mock domain.""" - mock_platform(hass, "some_domain.backup", platform or MockPlatform()) - assert await async_setup_component(hass, "some_domain", {}) + mock_platform(hass, f"{domain}.backup", platform or MockPlatform()) + assert await async_setup_component(hass, domain, {}) + await hass.async_block_till_done() -async def test_constructor(hass: HomeAssistant) -> None: - """Test BackupManager constructor.""" - manager = BackupManager(hass) - assert manager.backup_dir.as_posix() == hass.config.path("backups") +@pytest.fixture(autouse=True) +def mock_delay_save() -> Generator[None]: + """Mock the delay save constant.""" + with patch("homeassistant.components.backup.store.STORE_DELAY_SAVE", 0): + yield -async def test_load_backups(hass: HomeAssistant) -> 
None: - """Test loading backups.""" - manager = BackupManager(hass) - with ( - patch("pathlib.Path.glob", return_value=[TEST_BACKUP.path]), - patch("tarfile.open", return_value=MagicMock()), - patch( - "homeassistant.components.backup.manager.json_loads_object", - return_value={ - "slug": TEST_BACKUP.slug, - "name": TEST_BACKUP.name, - "date": TEST_BACKUP.date, - }, +@pytest.fixture(name="generate_backup_id") +def generate_backup_id_fixture() -> Generator[MagicMock]: + """Mock generate backup id.""" + with patch("homeassistant.components.backup.manager._generate_backup_id") as mock: + mock.return_value = "abc123" + yield mock + + +@pytest.mark.usefixtures("mock_backup_generation") +async def test_async_create_backup( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mocked_json_bytes: Mock, + mocked_tarfile: Mock, +) -> None: + """Test create backup.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + new_backup = NewBackup(backup_job_id="time-123") + backup_task = AsyncMock( + return_value=WrittenBackup( + backup=TEST_BACKUP_ABC123, + open_stream=AsyncMock(), + release_stream=AsyncMock(), ), - patch( - "pathlib.Path.stat", - return_value=MagicMock(st_size=TEST_BACKUP.size), - ), - ): - await manager.load_backups() - backups = await manager.async_get_backups() - assert backups == {TEST_BACKUP.slug: TEST_BACKUP} + )() # call it so that it can be awaited + with patch( + "homeassistant.components.backup.manager.CoreBackupReaderWriter.async_create_backup", + return_value=(new_backup, backup_task), + ) as create_backup: + await hass.services.async_call( + DOMAIN, + "create", + blocking=True, + ) -async def test_load_backups_with_exception( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test loading backups with exception.""" - manager = BackupManager(hass) - with ( - patch("pathlib.Path.glob", return_value=[TEST_BACKUP.path]), - patch("tarfile.open", side_effect=OSError("Test 
exception")), - ): - await manager.load_backups() - backups = await manager.async_get_backups() - assert f"Unable to read backup {TEST_BACKUP.path}: Test exception" in caplog.text - assert backups == {} - - -async def test_removing_backup( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test removing backup.""" - manager = BackupManager(hass) - manager.backups = {TEST_BACKUP.slug: TEST_BACKUP} - manager.loaded_backups = True - - with patch("pathlib.Path.exists", return_value=True): - await manager.async_remove_backup(slug=TEST_BACKUP.slug) - assert "Removed backup located at" in caplog.text - - -async def test_removing_non_existing_backup( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test removing not existing backup.""" - manager = BackupManager(hass) - - await manager.async_remove_backup(slug="non_existing") - assert "Removed backup located at" not in caplog.text - - -async def test_getting_backup_that_does_not_exist( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test getting backup that does not exist.""" - manager = BackupManager(hass) - manager.backups = {TEST_BACKUP.slug: TEST_BACKUP} - manager.loaded_backups = True - - with patch("pathlib.Path.exists", return_value=False): - backup = await manager.async_get_backup(slug=TEST_BACKUP.slug) - assert backup is None - - assert ( - f"Removing tracked backup ({TEST_BACKUP.slug}) that " - f"does not exists on the expected path {TEST_BACKUP.path}" - ) in caplog.text + assert create_backup.called + assert create_backup.call_args == call( + agent_ids=["backup.local"], + backup_name="Core 2025.1.0", + include_addons=None, + include_all_addons=False, + include_database=True, + include_folders=None, + include_homeassistant=True, + on_progress=ANY, + password=None, + ) async def test_async_create_backup_when_backing_up(hass: HomeAssistant) -> None: """Test generate backup.""" - manager = BackupManager(hass) - manager.backing_up = True - 
with pytest.raises(HomeAssistantError, match="Backup already in progress"): - await manager.async_create_backup() + manager = BackupManager(hass, CoreBackupReaderWriter(hass)) + manager.last_event = CreateBackupEvent( + stage=None, state=CreateBackupState.IN_PROGRESS + ) + with pytest.raises(HomeAssistantError, match="Backup manager busy"): + await manager.async_create_backup( + agent_ids=[LOCAL_AGENT_ID], + include_addons=[], + include_all_addons=False, + include_database=True, + include_folders=[], + include_homeassistant=True, + name=None, + password=None, + ) -async def test_async_create_backup( +@pytest.mark.parametrize( + ("parameters", "expected_error"), + [ + ({"agent_ids": []}, "At least one agent must be selected"), + ({"agent_ids": ["non_existing"]}, "Invalid agent selected"), + ( + {"include_addons": ["ssl"], "include_all_addons": True}, + "Cannot include all addons and specify specific addons", + ), + ({"include_homeassistant": False}, "Home Assistant must be included in backup"), + ], +) +async def test_create_backup_wrong_parameters( hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + parameters: dict[str, Any], + expected_error: str, +) -> None: + """Test create backup with wrong parameters.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + ws_client = await hass_ws_client(hass) + + default_parameters = { + "agent_ids": [LOCAL_AGENT_ID], + "include_addons": [], + "include_all_addons": False, + "include_database": True, + "include_folders": [], + "include_homeassistant": True, + } + + await ws_client.send_json_auto_id( + {"type": "backup/generate"} | default_parameters | parameters + ) + result = await ws_client.receive_json() + + assert result["success"] is False + assert result["error"]["code"] == "home_assistant_error" + assert result["error"]["message"] == expected_error + + +@pytest.mark.usefixtures("mock_backup_generation") +@pytest.mark.parametrize( + ("agent_ids", "backup_directory", 
"temp_file_unlink_call_count"), + [ + ([LOCAL_AGENT_ID], "backups", 0), + (["test.remote"], "tmp_backups", 1), + ([LOCAL_AGENT_ID, "test.remote"], "backups", 0), + ], +) +@pytest.mark.parametrize( + "params", + [ + {}, + {"include_database": True, "name": "abc123"}, + {"include_database": False}, + {"password": "pass123"}, + ], +) +async def test_async_initiate_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, caplog: pytest.LogCaptureFixture, + mocked_json_bytes: Mock, + mocked_tarfile: Mock, + generate_backup_id: MagicMock, + path_glob: MagicMock, + params: dict[str, Any], + agent_ids: list[str], + backup_directory: str, + temp_file_unlink_call_count: int, ) -> None: """Test generate backup.""" - manager = BackupManager(hass) - manager.loaded_backups = True + local_agent = local_backup_platform.CoreLocalBackupAgent(hass) + remote_agent = BackupAgentTest("remote", backups=[]) + agents = { + f"backup.{local_agent.name}": local_agent, + f"test.{remote_agent.name}": remote_agent, + } + with patch( + "homeassistant.components.backup.backup.async_get_backup_agents" + ) as core_get_backup_agents: + core_get_backup_agents.return_value = [local_agent] + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) - await _mock_backup_generation(manager) + ws_client = await hass_ws_client(hass) - assert "Generated new backup with slug " in caplog.text - assert "Creating backup directory" in caplog.text - assert "Loaded 0 platforms" in caplog.text + include_database = params.get("include_database", True) + name = params.get("name", "Core 2025.1.0") + password = params.get("password") + path_glob.return_value = [] + + await ws_client.send_json_auto_id({"type": "backup/info"}) + result = await ws_client.receive_json() + + assert result["success"] is 
True + assert result["result"] == { + "backups": [], + "agent_errors": {}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + } + + await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + result = await ws_client.receive_json() + assert result["success"] is True + + with ( + patch("pathlib.Path.open", mock_open(read_data=b"test")), + patch("pathlib.Path.unlink") as unlink_mock, + ): + await ws_client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": agent_ids} | params + ) + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": None, + "state": CreateBackupState.IN_PROGRESS, + } + result = await ws_client.receive_json() + assert result["success"] is True + + backup_id = result["result"]["backup_job_id"] + assert backup_id == generate_backup_id.return_value + + await hass.async_block_till_done() + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": CreateBackupStage.HOME_ASSISTANT, + "state": CreateBackupState.IN_PROGRESS, + } + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": CreateBackupStage.UPLOAD_TO_AGENTS, + "state": CreateBackupState.IN_PROGRESS, + } + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": None, + "state": CreateBackupState.COMPLETED, + } + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + assert unlink_mock.call_count == temp_file_unlink_call_count + + assert mocked_json_bytes.call_count == 1 + backup_json_dict = mocked_json_bytes.call_args[0][0] + assert 
isinstance(backup_json_dict, dict) + assert backup_json_dict == { + "compressed": True, + "date": ANY, + "homeassistant": { + "exclude_database": not include_database, + "version": "2025.1.0", + }, + "name": name, + "protected": bool(password), + "slug": ANY, + "type": "partial", + "version": 2, + } + + await ws_client.send_json_auto_id( + {"type": "backup/details", "backup_id": backup_id} + ) + result = await ws_client.receive_json() + + backup_data = result["result"]["backup"] + backup_agent_ids = backup_data.pop("agent_ids") + + assert backup_agent_ids == agent_ids + + backup = AgentBackup.from_dict(backup_data) + + assert backup == AgentBackup( + addons=[], + backup_id=ANY, + database_included=include_database, + date=ANY, + folders=[], + homeassistant_included=True, + homeassistant_version="2025.1.0", + name=name, + protected=bool(password), + size=ANY, + ) + for agent_id in agent_ids: + agent = agents[agent_id] + assert len(agent._backups) == 1 + agent_backup = agent._backups[backup.backup_id] + assert agent_backup.backup_id == backup.backup_id + assert agent_backup.date == backup.date + assert agent_backup.name == backup.name + assert agent_backup.protected == backup.protected + assert agent_backup.size == backup.size + + outer_tar = mocked_tarfile.return_value + core_tar = outer_tar.create_inner_tar.return_value.__enter__.return_value + expected_files = [call(hass.config.path(), arcname="data", recursive=False)] + [ + call(file, arcname=f"data/{file}", recursive=False) + for file in _EXPECTED_FILES_WITH_DATABASE[include_database] + ] + assert core_tar.add.call_args_list == expected_files + + tar_file_path = str(mocked_tarfile.call_args_list[0][0][0]) + backup_directory = hass.config.path(backup_directory) + assert tar_file_path == f"{backup_directory}/{backup.backup_id}.tar" + + +@pytest.mark.usefixtures("mock_backup_generation") +async def test_async_initiate_backup_with_agent_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + 
mocked_json_bytes: Mock, + mocked_tarfile: Mock, + generate_backup_id: MagicMock, + path_glob: MagicMock, + hass_storage: dict[str, Any], +) -> None: + """Test generate backup.""" + agent_ids = [LOCAL_AGENT_ID, "test.remote"] + local_agent = local_backup_platform.CoreLocalBackupAgent(hass) + remote_agent = BackupAgentTest("remote", backups=[]) + + with patch( + "homeassistant.components.backup.backup.async_get_backup_agents" + ) as core_get_backup_agents: + core_get_backup_agents.return_value = [local_agent] + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + + ws_client = await hass_ws_client(hass) + + path_glob.return_value = [] + + await ws_client.send_json_auto_id({"type": "backup/info"}) + result = await ws_client.receive_json() + + assert result["success"] is True + assert result["result"] == { + "backups": [], + "agent_errors": {}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + } + + await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + result = await ws_client.receive_json() + assert result["success"] is True + + with ( + patch("pathlib.Path.open", mock_open(read_data=b"test")), + patch.object( + remote_agent, "async_upload_backup", side_effect=Exception("Test exception") + ), + ): + await ws_client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": agent_ids} + ) + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": None, + "state": CreateBackupState.IN_PROGRESS, + } + result = await ws_client.receive_json() + assert result["success"] is True + + backup_id = 
result["result"]["backup_job_id"] + assert backup_id == generate_backup_id.return_value + + await hass.async_block_till_done() + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": CreateBackupStage.HOME_ASSISTANT, + "state": CreateBackupState.IN_PROGRESS, + } + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": CreateBackupStage.UPLOAD_TO_AGENTS, + "state": CreateBackupState.IN_PROGRESS, + } + + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": BackupManagerState.CREATE_BACKUP, + "stage": None, + "state": CreateBackupState.COMPLETED, + } + + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": BackupManagerState.IDLE} + + expected_backup_data = { + "addons": [], + "agent_ids": ["backup.local"], + "backup_id": "abc123", + "database_included": True, + "date": ANY, + "failed_agent_ids": ["test.remote"], + "folders": [], + "homeassistant_included": True, + "homeassistant_version": "2025.1.0", + "name": "Core 2025.1.0", + "protected": False, + "size": 123, + "with_strategy_settings": False, + } + + await ws_client.send_json_auto_id( + {"type": "backup/details", "backup_id": backup_id} + ) + result = await ws_client.receive_json() + assert result["result"] == { + "agent_errors": {}, + "backup": expected_backup_data, + } + + await ws_client.send_json_auto_id({"type": "backup/info"}) + result = await ws_client.receive_json() + assert result["result"] == { + "agent_errors": {}, + "backups": [expected_backup_data], + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + } + + await hass.async_block_till_done() + assert hass_storage[DOMAIN]["data"]["backups"] == [ + { + "backup_id": "abc123", + "failed_agent_ids": ["test.remote"], + "with_strategy_settings": False, + } + ] async def test_loading_platforms( @@ 
-202,198 +530,384 @@ async def test_loading_platforms( caplog: pytest.LogCaptureFixture, ) -> None: """Test loading backup platforms.""" - manager = BackupManager(hass) + manager = BackupManager(hass, CoreBackupReaderWriter(hass)) - assert not manager.loaded_platforms assert not manager.platforms - await _setup_mock_domain( + await _setup_backup_platform( hass, - Mock( + platform=Mock( async_pre_backup=AsyncMock(), async_post_backup=AsyncMock(), + async_get_backup_agents=AsyncMock(), ), ) await manager.load_platforms() await hass.async_block_till_done() - assert manager.loaded_platforms assert len(manager.platforms) == 1 assert "Loaded 1 platforms" in caplog.text +@pytest.mark.parametrize( + "platform_mock", + [ + Mock(async_pre_backup=AsyncMock(), spec=["async_pre_backup"]), + Mock(async_post_backup=AsyncMock(), spec=["async_post_backup"]), + Mock(spec=[]), + ], +) async def test_not_loading_bad_platforms( hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, + platform_mock: Mock, ) -> None: - """Test loading backup platforms.""" - manager = BackupManager(hass) - - assert not manager.loaded_platforms - assert not manager.platforms - - await _setup_mock_domain(hass) - await manager.load_platforms() + """Test not loading bad backup platforms.""" + await _setup_backup_platform( + hass, + domain="test", + platform=platform_mock, + ) + assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() - assert manager.loaded_platforms - assert len(manager.platforms) == 0 - - assert "Loaded 0 platforms" in caplog.text - assert ( - "some_domain does not implement required functions for the backup platform" - in caplog.text - ) + assert platform_mock.mock_calls == [] -async def test_exception_plaform_pre(hass: HomeAssistant) -> None: +async def test_exception_platform_pre( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: """Test exception in pre step.""" - manager = BackupManager(hass) - manager.loaded_backups = True async def 
_mock_step(hass: HomeAssistant) -> None: raise HomeAssistantError("Test exception") - await _setup_mock_domain( + remote_agent = BackupAgentTest("remote", backups=[]) + await _setup_backup_platform( hass, - Mock( + domain="test", + platform=Mock( async_pre_backup=_mock_step, async_post_backup=AsyncMock(), + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), ), ) + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() - with pytest.raises(HomeAssistantError): - await _mock_backup_generation(manager) + await hass.services.async_call( + DOMAIN, + "create", + blocking=True, + ) + + assert "Generating backup failed" in caplog.text + assert "Test exception" in caplog.text -async def test_exception_plaform_post(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("mock_backup_generation") +async def test_exception_platform_post( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: """Test exception in post step.""" - manager = BackupManager(hass) - manager.loaded_backups = True async def _mock_step(hass: HomeAssistant) -> None: raise HomeAssistantError("Test exception") - await _setup_mock_domain( + remote_agent = BackupAgentTest("remote", backups=[]) + await _setup_backup_platform( hass, - Mock( + domain="test", + platform=Mock( async_pre_backup=AsyncMock(), async_post_backup=_mock_step, + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), ), ) + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() - with pytest.raises(HomeAssistantError): - await _mock_backup_generation(manager) + await hass.services.async_call( + DOMAIN, + "create", + blocking=True, + ) + + assert "Generating backup failed" in caplog.text + assert "Test exception" in caplog.text -async def test_loading_platforms_when_running_async_pre_backup_actions( +@pytest.mark.parametrize( + ( + "agent_id_params", + "open_call_count", + "move_call_count", + "move_path_names", + 
"remote_agent_backups", + "remote_agent_backup_data", + "temp_file_unlink_call_count", + ), + [ + ( + "agent_id=backup.local&agent_id=test.remote", + 2, + 1, + ["abc123.tar"], + {TEST_BACKUP_ABC123.backup_id: TEST_BACKUP_ABC123}, + b"test", + 0, + ), + ( + "agent_id=backup.local", + 1, + 1, + ["abc123.tar"], + {}, + None, + 0, + ), + ( + "agent_id=test.remote", + 2, + 0, + [], + {TEST_BACKUP_ABC123.backup_id: TEST_BACKUP_ABC123}, + b"test", + 1, + ), + ], +) +async def test_receive_backup( hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, + hass_client: ClientSessionGenerator, + agent_id_params: str, + open_call_count: int, + move_call_count: int, + move_path_names: list[str], + remote_agent_backups: dict[str, AgentBackup], + remote_agent_backup_data: bytes | None, + temp_file_unlink_call_count: int, ) -> None: - """Test loading backup platforms when running post backup actions.""" - manager = BackupManager(hass) - - assert not manager.loaded_platforms - assert not manager.platforms - - await _setup_mock_domain( + """Test receive backup and upload to the local and a remote agent.""" + remote_agent = BackupAgentTest("remote", backups=[]) + await _setup_backup_platform( hass, - Mock( - async_pre_backup=AsyncMock(), - async_post_backup=AsyncMock(), + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, ), ) - await manager.async_pre_backup_actions() + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + client = await hass_client() - assert manager.loaded_platforms - assert len(manager.platforms) == 1 + upload_data = "test" + open_mock = mock_open(read_data=upload_data.encode(encoding="utf-8")) - assert "Loaded 1 platforms" in caplog.text - - -async def test_loading_platforms_when_running_async_post_backup_actions( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test loading backup platforms when running post 
backup actions.""" - manager = BackupManager(hass) - - assert not manager.loaded_platforms - assert not manager.platforms - - await _setup_mock_domain( - hass, - Mock( - async_pre_backup=AsyncMock(), - async_post_backup=AsyncMock(), + with ( + patch("pathlib.Path.open", open_mock), + patch("shutil.move") as move_mock, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=TEST_BACKUP_ABC123, ), - ) - await manager.async_post_backup_actions() - - assert manager.loaded_platforms - assert len(manager.platforms) == 1 - - assert "Loaded 1 platforms" in caplog.text - - -async def test_async_receive_backup( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test receiving a backup file.""" - manager = BackupManager(hass) - - size = 2 * 2**16 - protocol = Mock(_reading_paused=False) - stream = aiohttp.StreamReader(protocol, 2**16) - stream.feed_data(b"0" * size + b"\r\n--:--") - stream.feed_eof() - - open_mock = mock_open() - - with patch("pathlib.Path.open", open_mock), patch("shutil.move") as mover_mock: - await manager.async_receive_backup( - contents=aiohttp.BodyPartReader( - b"--:", - CIMultiDictProxy( - CIMultiDict( - { - aiohttp.hdrs.CONTENT_DISPOSITION: "attachment; filename=abc123.tar" - } - ) - ), - stream, - ) + patch("pathlib.Path.unlink") as unlink_mock, + ): + resp = await client.post( + f"/api/backup/upload?{agent_id_params}", + data={"file": StringIO(upload_data)}, ) - assert open_mock.call_count == 1 - assert mover_mock.call_count == 1 - assert mover_mock.mock_calls[0].args[1].name == "abc123.tar" + await hass.async_block_till_done() + + assert resp.status == 201 + assert open_mock.call_count == open_call_count + assert move_mock.call_count == move_call_count + for index, name in enumerate(move_path_names): + assert move_mock.call_args_list[index].args[1].name == name + assert remote_agent._backups == remote_agent_backups + assert remote_agent._backup_data == remote_agent_backup_data + assert 
unlink_mock.call_count == temp_file_unlink_call_count +@pytest.mark.usefixtures("mock_backup_generation") +async def test_receive_backup_busy_manager( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test receive backup with a busy manager.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + client = await hass_client() + ws_client = await hass_ws_client(hass) + + upload_data = "test" + + await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) + result = await ws_client.receive_json() + assert result["event"] == {"manager_state": "idle"} + + result = await ws_client.receive_json() + assert result["success"] is True + + new_backup = NewBackup(backup_job_id="time-123") + backup_task: asyncio.Future[WrittenBackup] = asyncio.Future() + with patch( + "homeassistant.components.backup.manager.CoreBackupReaderWriter.async_create_backup", + return_value=(new_backup, backup_task), + ) as create_backup: + await ws_client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": ["backup.local"]} + ) + result = await ws_client.receive_json() + assert result["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "in_progress", + } + result = await ws_client.receive_json() + assert result["success"] is True + assert result["result"] == {"backup_job_id": "time-123"} + + assert create_backup.call_count == 1 + + resp = await client.post( + "/api/backup/upload?agent_id=backup.local", + data={"file": StringIO(upload_data)}, + ) + + assert resp.status == 500 + assert ( + await resp.text() + == "Can't upload backup file: Backup manager busy: create_backup" + ) + + # finish the backup + backup_task.set_result( + WrittenBackup( + backup=TEST_BACKUP_ABC123, + open_stream=AsyncMock(), + release_stream=AsyncMock(), + ) + ) + await hass.async_block_till_done() + + +@pytest.mark.parametrize( + ("agent_id", "password", "restore_database", 
"restore_homeassistant", "dir"), + [ + (LOCAL_AGENT_ID, None, True, False, "backups"), + (LOCAL_AGENT_ID, "abc123", False, True, "backups"), + ("test.remote", None, True, True, "tmp_backups"), + ], +) async def test_async_trigger_restore( hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, + agent_id: str, + password: str | None, + restore_database: bool, + restore_homeassistant: bool, + dir: str, ) -> None: """Test trigger restore.""" - manager = BackupManager(hass) - manager.loaded_backups = True - manager.backups = {TEST_BACKUP.slug: TEST_BACKUP} + manager = BackupManager(hass, CoreBackupReaderWriter(hass)) + hass.data[DATA_MANAGER] = manager + + await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform) + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock( + return_value=[BackupAgentTest("remote", backups=[TEST_BACKUP_ABC123])] + ), + spec_set=BackupAgentPlatformProtocol, + ), + ) + await manager.load_platforms() + + local_agent = manager.backup_agents[LOCAL_AGENT_ID] + local_agent._backups = {TEST_BACKUP_ABC123.backup_id: TEST_BACKUP_ABC123} + local_agent._loaded_backups = True with ( patch("pathlib.Path.exists", return_value=True), + patch("pathlib.Path.open"), patch("pathlib.Path.write_text") as mocked_write_text, patch("homeassistant.core.ServiceRegistry.async_call") as mocked_service_call, + patch.object(BackupAgentTest, "async_download_backup") as download_mock, ): - await manager.async_restore_backup(TEST_BACKUP.slug) - assert mocked_write_text.call_args[0][0] == '{"path": "abc123.tar"}' + download_mock.return_value.__aiter__.return_value = iter((b"backup data",)) + await manager.async_restore_backup( + TEST_BACKUP_ABC123.backup_id, + agent_id=agent_id, + password=password, + restore_addons=None, + restore_database=restore_database, + restore_folders=None, + restore_homeassistant=restore_homeassistant, + ) + expected_restore_file = json.dumps( + { + "path": 
f"{hass.config.path()}/{dir}/abc123.tar", + "password": password, + "remove_after_restore": agent_id != LOCAL_AGENT_ID, + "restore_database": restore_database, + "restore_homeassistant": restore_homeassistant, + } + ) + assert mocked_write_text.call_args[0][0] == expected_restore_file assert mocked_service_call.called -async def test_async_trigger_restore_missing_backup(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("parameters", "expected_error"), + [ + ( + {"backup_id": TEST_BACKUP_DEF456.backup_id}, + "Backup def456 not found", + ), + ( + {"restore_addons": ["blah"]}, + "Addons and folders are not supported in core restore", + ), + ( + {"restore_folders": [Folder.ADDONS]}, + "Addons and folders are not supported in core restore", + ), + ( + {"restore_database": False, "restore_homeassistant": False}, + "Home Assistant or database must be included in restore", + ), + ], +) +async def test_async_trigger_restore_wrong_parameters( + hass: HomeAssistant, parameters: dict[str, Any], expected_error: str +) -> None: """Test trigger restore.""" - manager = BackupManager(hass) - manager.loaded_backups = True + manager = BackupManager(hass, CoreBackupReaderWriter(hass)) - with pytest.raises(HomeAssistantError, match="Backup abc123 not found"): - await manager.async_restore_backup(TEST_BACKUP.slug) + await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform) + await manager.load_platforms() + + local_agent = manager.backup_agents[LOCAL_AGENT_ID] + local_agent._backups = {TEST_BACKUP_ABC123.backup_id: TEST_BACKUP_ABC123} + local_agent._loaded_backups = True + + default_parameters = { + "agent_id": LOCAL_AGENT_ID, + "backup_id": TEST_BACKUP_ABC123.backup_id, + "password": None, + "restore_addons": None, + "restore_database": True, + "restore_folders": None, + "restore_homeassistant": True, + } + + with ( + patch("pathlib.Path.exists", return_value=True), + pytest.raises(HomeAssistantError, match=expected_error), + ): + await 
manager.async_restore_backup(**(default_parameters | parameters)) diff --git a/tests/components/backup/test_models.py b/tests/components/backup/test_models.py new file mode 100644 index 00000000000..6a547f40dc3 --- /dev/null +++ b/tests/components/backup/test_models.py @@ -0,0 +1,11 @@ +"""Tests for the Backup integration.""" + +from homeassistant.components.backup import AgentBackup + +from .common import TEST_BACKUP_ABC123 + + +async def test_agent_backup_serialization() -> None: + """Test AgentBackup serialization.""" + + assert AgentBackup.from_dict(TEST_BACKUP_ABC123.as_dict()) == TEST_BACKUP_ABC123 diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index 125ba8adaad..9df93ee9c46 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -1,18 +1,74 @@ """Tests for the Backup integration.""" -from unittest.mock import patch +from asyncio import Future +from collections.abc import Generator +from datetime import datetime +from typing import Any +from unittest.mock import ANY, AsyncMock, MagicMock, call, patch +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.backup.manager import Backup +from homeassistant.components.backup import AgentBackup, BackupAgentError +from homeassistant.components.backup.agent import BackupAgentUnreachableError +from homeassistant.components.backup.const import DATA_MANAGER, DOMAIN +from homeassistant.components.backup.manager import ( + CreateBackupEvent, + CreateBackupState, + NewBackup, + WrittenBackup, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from .common import TEST_BACKUP, setup_backup_integration +from .common import ( + LOCAL_AGENT_ID, + TEST_BACKUP_ABC123, + TEST_BACKUP_DEF456, + BackupAgentTest, + setup_backup_integration, +) +from tests.common import async_fire_time_changed, 
async_mock_service from tests.typing import WebSocketGenerator +BACKUP_CALL = call( + agent_ids=["test.test-agent"], + backup_name="test-name", + include_addons=["test-addon"], + include_all_addons=False, + include_database=True, + include_folders=["media"], + include_homeassistant=True, + password="test-password", + on_progress=ANY, +) + +DEFAULT_STORAGE_DATA = { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": [], + "include_addons": None, + "include_all_addons": False, + "include_database": True, + "include_folders": None, + "name": None, + "password": None, + }, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + "retention": { + "copies": None, + "days": None, + }, + "schedule": { + "state": "never", + }, + }, +} + @pytest.fixture def sync_access_token_proxy( @@ -26,145 +82,558 @@ def sync_access_token_proxy( return request.getfixturevalue(access_token_fixture_name) +@pytest.fixture(autouse=True) +def mock_delay_save() -> Generator[None]: + """Mock the delay save constant.""" + with patch("homeassistant.components.backup.store.STORE_DELAY_SAVE", 0): + yield + + +@pytest.fixture(name="create_backup") +def mock_create_backup() -> Generator[AsyncMock]: + """Mock manager create backup.""" + mock_written_backup = MagicMock(spec_set=WrittenBackup) + mock_written_backup.backup.backup_id = "abc123" + mock_written_backup.open_stream = AsyncMock() + mock_written_backup.release_stream = AsyncMock() + fut = Future() + fut.set_result(mock_written_backup) + with patch( + "homeassistant.components.backup.CoreBackupReaderWriter.async_create_backup" + ) as mock_create_backup: + mock_create_backup.return_value = (MagicMock(), fut) + yield mock_create_backup + + +@pytest.fixture(name="delete_backup") +def mock_delete_backup() -> Generator[AsyncMock]: + """Mock manager delete backup.""" + with patch( + "homeassistant.components.backup.BackupManager.async_delete_backup" + ) as mock_delete_backup: + yield mock_delete_backup + + 
+@pytest.fixture(name="get_backups") +def mock_get_backups() -> Generator[AsyncMock]: + """Mock manager get backups.""" + with patch( + "homeassistant.components.backup.BackupManager.async_get_backups" + ) as mock_get_backups: + yield mock_get_backups + + @pytest.mark.parametrize( - "with_hassio", + ("remote_agents", "remote_backups"), [ - pytest.param(True, id="with_hassio"), - pytest.param(False, id="without_hassio"), + ([], {}), + (["remote"], {}), + (["remote"], {"test.remote": [TEST_BACKUP_ABC123]}), + (["remote"], {"test.remote": [TEST_BACKUP_DEF456]}), ], ) async def test_info( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, + remote_agents: list[str], + remote_backups: dict[str, list[AgentBackup]], snapshot: SnapshotAssertion, - with_hassio: bool, ) -> None: """Test getting backup info.""" - await setup_backup_integration(hass, with_hassio=with_hassio) + await setup_backup_integration( + hass, + with_hassio=False, + backups={LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]} | remote_backups, + remote_agents=remote_agents, + ) client = await hass_ws_client(hass) await hass.async_block_till_done() - with patch( - "homeassistant.components.backup.manager.BackupManager.async_get_backups", - return_value={TEST_BACKUP.slug: TEST_BACKUP}, - ): - await client.send_json_auto_id({"type": "backup/info"}) - assert snapshot == await client.receive_json() + await client.send_json_auto_id({"type": "backup/info"}) + assert await client.receive_json() == snapshot @pytest.mark.parametrize( - "backup_content", - [ - pytest.param(TEST_BACKUP, id="with_backup_content"), - pytest.param(None, id="without_backup_content"), - ], + "side_effect", [HomeAssistantError("Boom!"), BackupAgentUnreachableError] ) +async def test_info_with_errors( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + side_effect: Exception, + snapshot: SnapshotAssertion, +) -> None: + """Test getting backup info with one unavailable agent.""" + await setup_backup_integration( + hass, 
with_hassio=False, backups={LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]} + ) + hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest("test") + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + with patch.object(BackupAgentTest, "async_list_backups", side_effect=side_effect): + await client.send_json_auto_id({"type": "backup/info"}) + assert await client.receive_json() == snapshot + + @pytest.mark.parametrize( - "with_hassio", + ("remote_agents", "backups"), [ - pytest.param(True, id="with_hassio"), - pytest.param(False, id="without_hassio"), + ([], {}), + (["remote"], {LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]}), + (["remote"], {"test.remote": [TEST_BACKUP_ABC123]}), + (["remote"], {"test.remote": [TEST_BACKUP_DEF456]}), + ( + ["remote"], + { + LOCAL_AGENT_ID: [TEST_BACKUP_ABC123], + "test.remote": [TEST_BACKUP_ABC123], + }, + ), ], ) async def test_details( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, + remote_agents: list[str], + backups: dict[str, list[AgentBackup]], snapshot: SnapshotAssertion, - with_hassio: bool, - backup_content: Backup | None, ) -> None: """Test getting backup info.""" - await setup_backup_integration(hass, with_hassio=with_hassio) + await setup_backup_integration( + hass, with_hassio=False, backups=backups, remote_agents=remote_agents + ) client = await hass_ws_client(hass) await hass.async_block_till_done() - with patch( - "homeassistant.components.backup.manager.BackupManager.async_get_backup", - return_value=backup_content, - ): - await client.send_json_auto_id({"type": "backup/details", "slug": "abc123"}) + with patch("pathlib.Path.exists", return_value=True): + await client.send_json_auto_id( + {"type": "backup/details", "backup_id": "abc123"} + ) assert await client.receive_json() == snapshot @pytest.mark.parametrize( - "with_hassio", - [ - pytest.param(True, id="with_hassio"), - pytest.param(False, id="without_hassio"), - ], + "side_effect", [HomeAssistantError("Boom!"), 
BackupAgentUnreachableError] ) -async def test_remove( +async def test_details_with_errors( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, + side_effect: Exception, snapshot: SnapshotAssertion, - with_hassio: bool, ) -> None: - """Test removing a backup file.""" - await setup_backup_integration(hass, with_hassio=with_hassio) + """Test getting backup info with one unavailable agent.""" + await setup_backup_integration( + hass, with_hassio=False, backups={LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]} + ) + hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest("test") client = await hass_ws_client(hass) await hass.async_block_till_done() - with patch( - "homeassistant.components.backup.manager.BackupManager.async_remove_backup", + with ( + patch("pathlib.Path.exists", return_value=True), + patch.object(BackupAgentTest, "async_get_backup", side_effect=side_effect), ): - await client.send_json_auto_id({"type": "backup/remove", "slug": "abc123"}) - assert snapshot == await client.receive_json() + await client.send_json_auto_id( + {"type": "backup/details", "backup_id": "abc123"} + ) + assert await client.receive_json() == snapshot @pytest.mark.parametrize( - "with_hassio", + ("remote_agents", "backups"), [ - pytest.param(True, id="with_hassio"), - pytest.param(False, id="without_hassio"), + ([], {}), + (["remote"], {LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]}), + (["remote"], {"test.remote": [TEST_BACKUP_ABC123]}), + (["remote"], {"test.remote": [TEST_BACKUP_DEF456]}), + ( + ["remote"], + { + LOCAL_AGENT_ID: [TEST_BACKUP_ABC123], + "test.remote": [TEST_BACKUP_ABC123], + }, + ), ], ) +async def test_delete( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + remote_agents: list[str], + backups: dict[str, list[AgentBackup]], + snapshot: SnapshotAssertion, +) -> None: + """Test deleting a backup file.""" + await setup_backup_integration( + hass, with_hassio=False, backups=backups, remote_agents=remote_agents + ) + + client = await 
hass_ws_client(hass) + await hass.async_block_till_done() + + await client.send_json_auto_id({"type": "backup/info"}) + assert await client.receive_json() == snapshot + + await client.send_json_auto_id({"type": "backup/delete", "backup_id": "abc123"}) + assert await client.receive_json() == snapshot + + await client.send_json_auto_id({"type": "backup/info"}) + assert await client.receive_json() == snapshot + + +@pytest.mark.parametrize( + "storage_data", + [ + DEFAULT_STORAGE_DATA, + DEFAULT_STORAGE_DATA + | { + "backups": [ + { + "backup_id": "abc123", + "failed_agent_ids": ["test.remote"], + "with_strategy_settings": False, + } + ] + }, + ], +) +@pytest.mark.parametrize( + "side_effect", [None, HomeAssistantError("Boom!"), BackupAgentUnreachableError] +) +async def test_delete_with_errors( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + hass_storage: dict[str, Any], + side_effect: Exception, + storage_data: dict[str, Any] | None, + snapshot: SnapshotAssertion, +) -> None: + """Test deleting a backup with one unavailable agent.""" + hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + "version": 1, + } + await setup_backup_integration( + hass, with_hassio=False, backups={LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]} + ) + hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest("test") + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + with patch.object(BackupAgentTest, "async_delete_backup", side_effect=side_effect): + await client.send_json_auto_id({"type": "backup/delete", "backup_id": "abc123"}) + assert await client.receive_json() == snapshot + + await client.send_json_auto_id({"type": "backup/info"}) + assert await client.receive_json() == snapshot + + +async def test_agent_delete_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test deleting a backup file with a mock agent.""" + await setup_backup_integration(hass) + 
hass.data[DATA_MANAGER].backup_agents = {"domain.test": BackupAgentTest("test")} + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + with patch.object(BackupAgentTest, "async_delete_backup") as delete_mock: + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": "abc123", + } + ) + assert await client.receive_json() == snapshot + + assert delete_mock.call_args == call("abc123") + + +@pytest.mark.parametrize( + "data", + [ + None, + {}, + {"password": "abc123"}, + ], +) +@pytest.mark.usefixtures("mock_backup_generation") async def test_generate( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, + data: dict[str, Any] | None, + freezer: FrozenDateTimeFactory, snapshot: SnapshotAssertion, - with_hassio: bool, ) -> None: """Test generating a backup.""" - await setup_backup_integration(hass, with_hassio=with_hassio) + await setup_backup_integration(hass, with_hassio=False) client = await hass_ws_client(hass) + freezer.move_to("2024-11-13 12:01:00+01:00") await hass.async_block_till_done() - with patch( - "homeassistant.components.backup.manager.BackupManager.async_create_backup", - return_value=TEST_BACKUP, - ): - await client.send_json_auto_id({"type": "backup/generate"}) - assert snapshot == await client.receive_json() + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + assert await client.receive_json() == snapshot + await client.send_json_auto_id( + {"type": "backup/generate", **{"agent_ids": ["backup.local"]} | (data or {})} + ) + for _ in range(6): + assert await client.receive_json() == snapshot @pytest.mark.parametrize( - "with_hassio", + ("parameters", "expected_error"), [ - pytest.param(True, id="with_hassio"), - pytest.param(False, id="without_hassio"), + ( + {"include_homeassistant": False}, + "Home Assistant must be included in backup", + ), + ( + {"include_addons": ["blah"]}, + "Addons and folders are not supported by core backup", + ), + ( + {"include_all_addons": True}, + 
"Addons and folders are not supported by core backup", + ), + ( + {"include_folders": ["ssl"]}, + "Addons and folders are not supported by core backup", + ), ], ) -async def test_restore( +async def test_generate_wrong_parameters( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, + parameters: dict[str, Any], + expected_error: str, +) -> None: + """Test generating a backup.""" + await setup_backup_integration(hass, with_hassio=False) + + client = await hass_ws_client(hass) + + default_parameters = {"type": "backup/generate", "agent_ids": ["backup.local"]} + + await client.send_json_auto_id(default_parameters | parameters) + response = await client.receive_json() + assert not response["success"] + assert response["error"] == { + "code": "home_assistant_error", + "message": expected_error, + } + + +@pytest.mark.usefixtures("mock_backup_generation") +@pytest.mark.parametrize( + ("params", "expected_extra_call_params"), + [ + ({"agent_ids": ["backup.local"]}, {"agent_ids": ["backup.local"]}), + ( + { + "agent_ids": ["backup.local"], + "include_database": False, + "name": "abc123", + }, + { + "agent_ids": ["backup.local"], + "include_addons": None, + "include_database": False, + "include_folders": None, + "name": "abc123", + }, + ), + ], +) +async def test_generate_calls_create( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, + params: dict[str, Any], + expected_extra_call_params: dict[str, Any], +) -> None: + """Test translation of WS parameter to backup/generate to async_initiate_backup.""" + await setup_backup_integration(hass, with_hassio=False) + + client = await hass_ws_client(hass) + freezer.move_to("2024-11-13 12:01:00+01:00") + await hass.async_block_till_done() + + with patch( + "homeassistant.components.backup.manager.BackupManager.async_initiate_backup", + return_value=NewBackup(backup_job_id="abc123"), + ) as generate_backup: + await client.send_json_auto_id({"type": 
"backup/generate"} | params) + result = await client.receive_json() + assert result["success"] + assert result["result"] == {"backup_job_id": "abc123"} + generate_backup.assert_called_once_with( + **{ + "include_all_addons": False, + "include_homeassistant": True, + "include_addons": None, + "include_database": True, + "include_folders": None, + "name": None, + "password": None, + } + | expected_extra_call_params + ) + + +@pytest.mark.usefixtures("mock_backup_generation") +@pytest.mark.parametrize( + ("create_backup_settings", "expected_call_params"), + [ + ( + {}, + { + "agent_ids": [], + "include_addons": None, + "include_all_addons": False, + "include_database": True, + "include_folders": None, + "include_homeassistant": True, + "name": None, + "password": None, + "with_strategy_settings": True, + }, + ), + ( + { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": False, + "include_database": True, + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": False, + "include_database": True, + "include_folders": ["media"], + "include_homeassistant": True, + "name": "test-name", + "password": "test-password", + "with_strategy_settings": True, + }, + ), + ], +) +async def test_generate_with_default_settings_calls_create( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, + create_backup_settings: dict[str, Any], + expected_call_params: dict[str, Any], +) -> None: + """Test backup/generate_with_strategy_settings calls async_initiate_backup.""" + await setup_backup_integration(hass, with_hassio=False) + + client = await hass_ws_client(hass) + freezer.move_to("2024-11-13 12:01:00+01:00") + await hass.async_block_till_done() + + await client.send_json_auto_id( + {"type": "backup/config/update", "create_backup": 
create_backup_settings} + ) + result = await client.receive_json() + assert result["success"] + + with patch( + "homeassistant.components.backup.manager.BackupManager.async_initiate_backup", + return_value=NewBackup(backup_job_id="abc123"), + ) as generate_backup: + await client.send_json_auto_id( + {"type": "backup/generate_with_strategy_settings"} + ) + result = await client.receive_json() + assert result["success"] + assert result["result"] == {"backup_job_id": "abc123"} + generate_backup.assert_called_once_with(**expected_call_params) + + +@pytest.mark.parametrize( + "backups", + [ + {}, + {LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]}, + ], +) +async def test_restore_local_agent( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + backups: dict[str, list[AgentBackup]], snapshot: SnapshotAssertion, - with_hassio: bool, ) -> None: """Test calling the restore command.""" - await setup_backup_integration(hass, with_hassio=with_hassio) + await setup_backup_integration(hass, with_hassio=False, backups=backups) + restart_calls = async_mock_service(hass, "homeassistant", "restart") client = await hass_ws_client(hass) await hass.async_block_till_done() - with patch( - "homeassistant.components.backup.manager.BackupManager.async_restore_backup", + with ( + patch("pathlib.Path.exists", return_value=True), + patch("pathlib.Path.write_text"), ): - await client.send_json_auto_id({"type": "backup/restore", "slug": "abc123"}) + await client.send_json_auto_id( + { + "type": "backup/restore", + "backup_id": "abc123", + "agent_id": "backup.local", + } + ) assert await client.receive_json() == snapshot + assert len(restart_calls) == snapshot + + +@pytest.mark.parametrize( + ("remote_agents", "backups"), + [ + (["remote"], {}), + (["remote"], {"test.remote": [TEST_BACKUP_ABC123]}), + ], +) +async def test_restore_remote_agent( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + remote_agents: list[str], + backups: dict[str, list[AgentBackup]], + snapshot: 
SnapshotAssertion, +) -> None: + """Test calling the restore command.""" + await setup_backup_integration( + hass, with_hassio=False, backups=backups, remote_agents=remote_agents + ) + restart_calls = async_mock_service(hass, "homeassistant", "restart") + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + with patch("pathlib.Path.write_text"), patch("pathlib.Path.open"): + await client.send_json_auto_id( + { + "type": "backup/restore", + "backup_id": "abc123", + "agent_id": "test.remote", + } + ) + assert await client.receive_json() == snapshot + assert len(restart_calls) == snapshot @pytest.mark.parametrize( @@ -178,6 +647,7 @@ async def test_restore( pytest.param(False, id="without_hassio"), ], ) +@pytest.mark.usefixtures("supervisor_client") async def test_backup_end( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -197,7 +667,7 @@ async def test_backup_end( "homeassistant.components.backup.manager.BackupManager.async_post_backup_actions", ): await client.send_json_auto_id({"type": "backup/end"}) - assert snapshot == await client.receive_json() + assert await client.receive_json() == snapshot @pytest.mark.parametrize( @@ -211,6 +681,7 @@ async def test_backup_end( pytest.param(False, id="without_hassio"), ], ) +@pytest.mark.usefixtures("supervisor_client") async def test_backup_start( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -230,7 +701,7 @@ async def test_backup_start( "homeassistant.components.backup.manager.BackupManager.async_pre_backup_actions", ): await client.send_json_auto_id({"type": "backup/start"}) - assert snapshot == await client.receive_json() + assert await client.receive_json() == snapshot @pytest.mark.parametrize( @@ -241,7 +712,8 @@ async def test_backup_start( Exception("Boom"), ], ) -async def test_backup_end_excepion( +@pytest.mark.usefixtures("supervisor_client") +async def test_backup_end_exception( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, snapshot: 
SnapshotAssertion, @@ -259,7 +731,7 @@ async def test_backup_end_excepion( side_effect=exception, ): await client.send_json_auto_id({"type": "backup/end"}) - assert snapshot == await client.receive_json() + assert await client.receive_json() == snapshot @pytest.mark.parametrize( @@ -270,7 +742,8 @@ async def test_backup_end_excepion( Exception("Boom"), ], ) -async def test_backup_start_excepion( +@pytest.mark.usefixtures("supervisor_client") +async def test_backup_start_exception( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, snapshot: SnapshotAssertion, @@ -288,4 +761,993 @@ async def test_backup_start_excepion( side_effect=exception, ): await client.send_json_auto_id({"type": "backup/start"}) - assert snapshot == await client.receive_json() + assert await client.receive_json() == snapshot + + +async def test_agents_info( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test getting backup agents info.""" + await setup_backup_integration(hass, with_hassio=False) + hass.data[DATA_MANAGER].backup_agents["domain.test"] = BackupAgentTest("test") + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + await client.send_json_auto_id({"type": "backup/agents/info"}) + assert await client.receive_json() == snapshot + + +@pytest.mark.usefixtures("create_backup", "delete_backup", "get_backups") +@pytest.mark.parametrize( + "storage_data", + [ + None, + { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": True, + "include_database": True, + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + "retention": {"copies": 3, "days": 7}, + "last_attempted_strategy_backup": datetime.fromisoformat( + "2024-10-26T04:45:00+01:00" + ), + "last_completed_strategy_backup": datetime.fromisoformat( + "2024-10-26T04:45:00+01:00" + ), + "schedule": {"state": "daily"}, + 
}, + }, + { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": None, + "include_all_addons": False, + "include_database": False, + "include_folders": None, + "name": None, + "password": None, + }, + "retention": {"copies": 3, "days": None}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + "schedule": {"state": "never"}, + }, + }, + { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": None, + "include_all_addons": False, + "include_database": False, + "include_folders": None, + "name": None, + "password": None, + }, + "retention": {"copies": None, "days": 7}, + "last_attempted_strategy_backup": datetime.fromisoformat( + "2024-10-27T04:45:00+01:00" + ), + "last_completed_strategy_backup": datetime.fromisoformat( + "2024-10-26T04:45:00+01:00" + ), + "schedule": {"state": "never"}, + }, + }, + { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": None, + "include_all_addons": False, + "include_database": False, + "include_folders": None, + "name": None, + "password": None, + }, + "retention": {"copies": None, "days": None}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + "schedule": {"state": "mon"}, + }, + }, + { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": None, + "include_all_addons": False, + "include_database": False, + "include_folders": None, + "name": None, + "password": None, + }, + "retention": {"copies": None, "days": None}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + "schedule": {"state": "sat"}, + }, + }, + ], +) +async def test_config_info( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + hass_storage: dict[str, Any], + storage_data: dict[str, Any] | None, +) -> None: + """Test getting 
backup config info.""" + hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + "version": 1, + } + + await setup_backup_integration(hass) + await hass.async_block_till_done() + + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/config/info"}) + assert await client.receive_json() == snapshot + + +@pytest.mark.usefixtures("create_backup", "delete_backup", "get_backups") +@pytest.mark.parametrize( + "command", + [ + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 7}, + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "schedule": "mon", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "schedule": "never", + }, + { + "type": "backup/config/update", + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": 3, "days": 7}, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": None}, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": 3, "days": None}, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 7}, + "schedule": "daily", + }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": 3}, + "schedule": "daily", + }, + { + "type": 
"backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"days": 7}, + "schedule": "daily", + }, + ], +) +async def test_config_update( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + command: dict[str, Any], + hass_storage: dict[str, Any], +) -> None: + """Test updating the backup config.""" + await setup_backup_integration(hass) + await hass.async_block_till_done() + + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/config/info"}) + assert await client.receive_json() == snapshot + + await client.send_json_auto_id(command) + result = await client.receive_json() + + assert result["success"] + + await client.send_json_auto_id({"type": "backup/config/info"}) + assert await client.receive_json() == snapshot + await hass.async_block_till_done() + + assert hass_storage[DOMAIN] == snapshot + + +@pytest.mark.usefixtures("create_backup", "delete_backup", "get_backups") +@pytest.mark.parametrize( + "command", + [ + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "schedule": "someday", + }, + ], +) +async def test_config_update_errors( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + command: dict[str, Any], +) -> None: + """Test errors when updating the backup config.""" + await setup_backup_integration(hass) + await hass.async_block_till_done() + + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/config/info"}) + assert await client.receive_json() == snapshot + + await client.send_json_auto_id(command) + result = await client.receive_json() + + assert not result["success"] + + await client.send_json_auto_id({"type": "backup/config/info"}) + assert await client.receive_json() == snapshot + await hass.async_block_till_done() + + +@pytest.mark.parametrize( + ( + "command", + "last_completed_strategy_backup", + "time_1", + "time_2", + 
"attempted_backup_time", + "completed_backup_time", + "backup_calls_1", + "backup_calls_2", + "call_args", + "create_backup_side_effect", + ), + [ + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "daily", + }, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-13T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 2, + BACKUP_CALL, + None, + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "mon", + }, + "2024-11-11T04:45:00+01:00", + "2024-11-18T04:45:00+01:00", + "2024-11-25T04:45:00+01:00", + "2024-11-18T04:45:00+01:00", + "2024-11-18T04:45:00+01:00", + 1, + 2, + BACKUP_CALL, + None, + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "never", + }, + "2024-11-11T04:45:00+01:00", + "2034-11-11T12:00:00+01:00", # ten years later and still no backups + "2034-11-11T13:00:00+01:00", + "2024-11-11T04:45:00+01:00", + "2024-11-11T04:45:00+01:00", + 0, + 0, + None, + None, + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "daily", + }, + "2024-10-26T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-13T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 2, + BACKUP_CALL, + None, + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "mon", + }, + "2024-10-26T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-13T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", # missed event uses daily schedule once + "2024-11-12T04:45:00+01:00", # missed event uses daily schedule once + 1, + 1, + BACKUP_CALL, + None, + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "never", + }, + 
"2024-10-26T04:45:00+01:00", + "2034-11-11T12:00:00+01:00", # ten years later and still no backups + "2034-11-12T12:00:00+01:00", + "2024-10-26T04:45:00+01:00", + "2024-10-26T04:45:00+01:00", + 0, + 0, + None, + None, + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "schedule": "daily", + }, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-13T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", # attempted to create backup but failed + "2024-11-11T04:45:00+01:00", + 1, + 2, + BACKUP_CALL, + [Exception("Boom"), None], + ), + ], +) +async def test_config_schedule_logic( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + hass_storage: dict[str, Any], + create_backup: AsyncMock, + command: dict[str, Any], + last_completed_strategy_backup: str, + time_1: str, + time_2: str, + attempted_backup_time: str, + completed_backup_time: str, + backup_calls_1: int, + backup_calls_2: int, + call_args: Any, + create_backup_side_effect: list[Exception | None] | None, +) -> None: + """Test config schedule logic.""" + client = await hass_ws_client(hass) + storage_data = { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": False, + "include_database": True, + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + "retention": {"copies": None, "days": None}, + "last_attempted_strategy_backup": datetime.fromisoformat( + last_completed_strategy_backup + ), + "last_completed_strategy_backup": datetime.fromisoformat( + last_completed_strategy_backup + ), + "schedule": {"state": "daily"}, + }, + } + hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + "version": 1, + } + create_backup.side_effect = create_backup_side_effect + await hass.config.async_set_time_zone("Europe/Amsterdam") + freezer.move_to("2024-11-11 12:00:00+01:00") + + 
await setup_backup_integration(hass, remote_agents=["test-agent"]) + await hass.async_block_till_done() + + await client.send_json_auto_id(command) + result = await client.receive_json() + + assert result["success"] + + freezer.move_to(time_1) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert create_backup.call_count == backup_calls_1 + assert create_backup.call_args == call_args + async_fire_time_changed(hass, fire_all=True) # flush out storage save + await hass.async_block_till_done() + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_attempted_strategy_backup"] + == attempted_backup_time + ) + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_completed_strategy_backup"] + == completed_backup_time + ) + + freezer.move_to(time_2) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert create_backup.call_count == backup_calls_2 + assert create_backup.call_args == call_args + + +@pytest.mark.parametrize( + ( + "command", + "backups", + "get_backups_agent_errors", + "delete_backup_agent_errors", + "last_backup_time", + "next_time", + "backup_time", + "backup_calls", + "get_backups_calls", + "delete_calls", + "delete_args_list", + ), + [ + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": None, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-3": MagicMock(date="2024-11-12T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, # we get backups even if backup retention copies is None + 0, + [], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 3, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), + 
"backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-3": MagicMock(date="2024-11-12T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 0, + [], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 3, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-4": MagicMock(date="2024-11-12T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 2, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-4": MagicMock(date="2024-11-12T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 2, + [call("backup-1"), call("backup-2")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 2, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-3": MagicMock(date="2024-11-12T04:45:00+01:00"), + }, + {"test-agent": BackupAgentError("Boom!")}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": "backup/config/update", + "create_backup": 
{"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 2, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-3": MagicMock(date="2024-11-12T04:45:00+01:00"), + }, + {}, + {"test-agent": BackupAgentError("Boom!")}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 0, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-4": MagicMock(date="2024-11-12T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 3, + [call("backup-1"), call("backup-2"), call("backup-3")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 0, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock(date="2024-11-12T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 0, + [], + ), + ], +) +async def test_config_retention_copies_logic( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + hass_storage: dict[str, Any], + create_backup: AsyncMock, + delete_backup: AsyncMock, + get_backups: AsyncMock, + command: dict[str, Any], + backups: dict[str, Any], + get_backups_agent_errors: dict[str, Exception], + delete_backup_agent_errors: dict[str, Exception], + last_backup_time: str, + next_time: str, + backup_time: str, + backup_calls: int, + get_backups_calls: int, + delete_calls: int, + 
delete_args_list: Any, +) -> None: + """Test config backup retention copies logic.""" + client = await hass_ws_client(hass) + storage_data = { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": False, + "include_database": True, + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + "retention": {"copies": None, "days": None}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": datetime.fromisoformat(last_backup_time), + "schedule": {"state": "daily"}, + }, + } + hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + "version": 1, + } + get_backups.return_value = (backups, get_backups_agent_errors) + delete_backup.return_value = delete_backup_agent_errors + await hass.config.async_set_time_zone("Europe/Amsterdam") + freezer.move_to("2024-11-11 12:00:00+01:00") + + await setup_backup_integration(hass, remote_agents=["test-agent"]) + await hass.async_block_till_done() + + await client.send_json_auto_id(command) + result = await client.receive_json() + + assert result["success"] + + freezer.move_to(next_time) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert create_backup.call_count == backup_calls + assert get_backups.call_count == get_backups_calls + assert delete_backup.call_count == delete_calls + assert delete_backup.call_args_list == delete_args_list + async_fire_time_changed(hass, fire_all=True) # flush out storage save + await hass.async_block_till_done() + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_attempted_strategy_backup"] + == backup_time + ) + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_completed_strategy_backup"] + == backup_time + ) + + +@pytest.mark.parametrize( + ( + "command", + "backups", + "get_backups_agent_errors", + "delete_backup_agent_errors", + "last_backup_time", + "start_time", + "next_time", + "get_backups_calls", + 
"delete_calls", + "delete_args_list", + ), + [ + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 2}, + "schedule": "never", + }, + { + "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 3}, + "schedule": "never", + }, + { + "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 0, + [], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 2}, + "schedule": "never", + }, + { + "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 2, + [call("backup-1"), call("backup-2")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 2}, + "schedule": "never", + }, + { + "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + }, + {"test-agent": BackupAgentError("Boom!")}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": 
None, "days": 2}, + "schedule": "never", + }, + { + "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + }, + {}, + {"test-agent": BackupAgentError("Boom!")}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent"]}, + "retention": {"copies": None, "days": 0}, + "schedule": "never", + }, + { + "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), + "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), + "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-11T12:00:00+01:00", + "2024-11-12T12:00:00+01:00", + 1, + 2, + [call("backup-1"), call("backup-2")], + ), + ], +) +async def test_config_retention_days_logic( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + hass_storage: dict[str, Any], + delete_backup: AsyncMock, + get_backups: AsyncMock, + command: dict[str, Any], + backups: dict[str, Any], + get_backups_agent_errors: dict[str, Exception], + delete_backup_agent_errors: dict[str, Exception], + last_backup_time: str, + start_time: str, + next_time: str, + get_backups_calls: int, + delete_calls: int, + delete_args_list: list[Any], +) -> None: + """Test config backup retention logic.""" + client = await hass_ws_client(hass) + storage_data = { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": False, + "include_database": True, + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + "retention": {"copies": None, "days": None}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": datetime.fromisoformat(last_backup_time), + "schedule": {"state": "never"}, + }, + } + 
hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + "version": 1, + } + get_backups.return_value = (backups, get_backups_agent_errors) + delete_backup.return_value = delete_backup_agent_errors + await hass.config.async_set_time_zone("Europe/Amsterdam") + freezer.move_to(start_time) + + await setup_backup_integration(hass) + await hass.async_block_till_done() + + await client.send_json_auto_id(command) + result = await client.receive_json() + + assert result["success"] + + freezer.move_to(next_time) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert get_backups.call_count == get_backups_calls + assert delete_backup.call_count == delete_calls + assert delete_backup.call_args_list == delete_args_list + async_fire_time_changed(hass, fire_all=True) # flush out storage save + await hass.async_block_till_done() + + +async def test_subscribe_event( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test generating a backup.""" + await setup_backup_integration(hass, with_hassio=False) + + manager = hass.data[DATA_MANAGER] + + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + assert await client.receive_json() == snapshot + assert await client.receive_json() == snapshot + + manager.async_on_backup_event( + CreateBackupEvent(stage=None, state=CreateBackupState.IN_PROGRESS) + ) + assert await client.receive_json() == snapshot diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py new file mode 100644 index 00000000000..16b446c7a2b --- /dev/null +++ b/tests/components/cloud/test_backup.py @@ -0,0 +1,568 @@ +"""Test the cloud backup platform.""" + +from collections.abc import AsyncGenerator, AsyncIterator, Generator +from io import StringIO +from typing import Any +from unittest.mock import Mock, PropertyMock, patch + +from aiohttp import ClientError +from hass_nabucasa import 
CloudError +import pytest +from yarl import URL + +from homeassistant.components.backup import ( + DOMAIN as BACKUP_DOMAIN, + AddonInfo, + AgentBackup, + Folder, +) +from homeassistant.components.cloud import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import ClientSessionGenerator, MagicMock, WebSocketGenerator + + +@pytest.fixture(autouse=True) +async def setup_integration( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, cloud: MagicMock +) -> AsyncGenerator[None]: + """Set up cloud integration.""" + with patch("homeassistant.components.backup.is_hassio", return_value=False): + assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}}) + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + await hass.async_block_till_done() + yield + + +@pytest.fixture +def mock_delete_file() -> Generator[MagicMock]: + """Mock list files.""" + with patch( + "homeassistant.components.cloud.backup.async_files_delete_file", + spec_set=True, + ) as delete_file: + yield delete_file + + +@pytest.fixture +def mock_get_download_details() -> Generator[MagicMock]: + """Mock list files.""" + with patch( + "homeassistant.components.cloud.backup.async_files_download_details", + spec_set=True, + ) as download_details: + download_details.return_value = { + "url": ( + "https://blabla.cloudflarestorage.com/blabla/backup/" + "462e16810d6841228828d9dd2f9e341e.tar?X-Amz-Algorithm=blah" + ), + } + yield download_details + + +@pytest.fixture +def mock_get_upload_details() -> Generator[MagicMock]: + """Mock list files.""" + with patch( + "homeassistant.components.cloud.backup.async_files_upload_details", + spec_set=True, + ) as download_details: + download_details.return_value = { + "url": ( + "https://blabla.cloudflarestorage.com/blabla/backup/" + "ea5c969e492c49df89d432a1483b8dc3.tar?X-Amz-Algorithm=blah" + ), + 
"headers": { + "content-md5": "HOhSM3WZkpHRYGiz4YRGIQ==", + "x-amz-meta-storage-type": "backup", + "x-amz-meta-b64json": ( + "eyJhZGRvbnMiOltdLCJiYWNrdXBfaWQiOiJjNDNiNWU2MCIsImRhdGUiOiIyMDI0LT" + "EyLTAzVDA0OjI1OjUwLjMyMDcwMy0wNTowMCIsImRhdGFiYXNlX2luY2x1ZGVkIjpm" + "YWxzZSwiZm9sZGVycyI6W10sImhvbWVhc3Npc3RhbnRfaW5jbHVkZWQiOnRydWUsIm" + "hvbWVhc3Npc3RhbnRfdmVyc2lvbiI6IjIwMjQuMTIuMC5kZXYwIiwibmFtZSI6ImVy" + "aWsiLCJwcm90ZWN0ZWQiOnRydWUsInNpemUiOjM1NjI0OTYwfQ==" + ), + }, + } + yield download_details + + +@pytest.fixture +def mock_list_files() -> Generator[MagicMock]: + """Mock list files.""" + with patch( + "homeassistant.components.cloud.backup.async_files_list", spec_set=True + ) as list_files: + list_files.return_value = [ + { + "Key": "462e16810d6841228828d9dd2f9e341e.tar", + "LastModified": "2024-11-22T10:49:01.182Z", + "Size": 34519040, + "Metadata": { + "addons": [], + "backup_id": "23e64aec", + "date": "2024-11-22T11:48:48.727189+01:00", + "database_included": True, + "folders": [], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0.dev0", + "name": "Core 2024.12.0.dev0", + "protected": False, + "size": 34519040, + "storage-type": "backup", + }, + } + ] + yield list_files + + +@pytest.fixture +def cloud_logged_in(cloud: MagicMock): + """Mock cloud logged in.""" + type(cloud).is_logged_in = PropertyMock(return_value=True) + + +async def test_agents_info( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test backup agent info.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agents": [{"agent_id": "backup.local"}, {"agent_id": "cloud.cloud"}], + } + + +async def test_agents_list_backups( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + cloud: MagicMock, + mock_list_files: Mock, +) -> None: + """Test agent list backups.""" + client = 
await hass_ws_client(hass) + await client.send_json_auto_id({"type": "backup/info"}) + response = await client.receive_json() + mock_list_files.assert_called_once_with(cloud, storage_type="backup") + + assert response["success"] + assert response["result"]["agent_errors"] == {} + assert response["result"]["backups"] == [ + { + "addons": [], + "backup_id": "23e64aec", + "date": "2024-11-22T11:48:48.727189+01:00", + "database_included": True, + "folders": [], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0.dev0", + "name": "Core 2024.12.0.dev0", + "protected": False, + "size": 34519040, + "agent_ids": ["cloud.cloud"], + "failed_agent_ids": [], + "with_strategy_settings": False, + } + ] + + +@pytest.mark.parametrize("side_effect", [ClientError, CloudError]) +async def test_agents_list_backups_fail_cloud( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + cloud: MagicMock, + mock_list_files: Mock, + side_effect: Exception, +) -> None: + """Test agent list backups.""" + client = await hass_ws_client(hass) + mock_list_files.side_effect = side_effect + + await client.send_json_auto_id({"type": "backup/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agent_errors": {"cloud.cloud": "Failed to list backups"}, + "backups": [], + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + } + + +@pytest.mark.parametrize( + ("backup_id", "expected_result"), + [ + ( + "23e64aec", + { + "addons": [], + "backup_id": "23e64aec", + "date": "2024-11-22T11:48:48.727189+01:00", + "database_included": True, + "folders": [], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0.dev0", + "name": "Core 2024.12.0.dev0", + "protected": False, + "size": 34519040, + "agent_ids": ["cloud.cloud"], + "failed_agent_ids": [], + "with_strategy_settings": False, + }, + ), + ( + "12345", + None, + ), + ], + ids=["found", "not_found"], +) +async def 
test_agents_get_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + cloud: MagicMock, + backup_id: str, + expected_result: dict[str, Any] | None, + mock_list_files: Mock, +) -> None: + """Test agent get backup.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id({"type": "backup/details", "backup_id": backup_id}) + response = await client.receive_json() + mock_list_files.assert_called_once_with(cloud, storage_type="backup") + + assert response["success"] + assert response["result"]["agent_errors"] == {} + assert response["result"]["backup"] == expected_result + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_download( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + mock_get_download_details: Mock, +) -> None: + """Test agent download backup.""" + client = await hass_client() + backup_id = "23e64aec" + + aioclient_mock.get( + mock_get_download_details.return_value["url"], content=b"backup data" + ) + + resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud") + assert resp.status == 200 + assert await resp.content.read() == b"backup data" + + +@pytest.mark.parametrize("side_effect", [ClientError, CloudError]) +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_download_fail_cloud( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_get_download_details: Mock, + side_effect: Exception, +) -> None: + """Test agent download backup, when cloud user is logged in.""" + client = await hass_client() + backup_id = "23e64aec" + mock_get_download_details.side_effect = side_effect + + resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud") + assert resp.status == 500 + content = await resp.content.read() + assert "Failed to get download details" in content.decode() + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def 
test_agents_download_fail_get( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + mock_get_download_details: Mock, +) -> None: + """Test agent download backup, when cloud user is logged in.""" + client = await hass_client() + backup_id = "23e64aec" + + aioclient_mock.get(mock_get_download_details.return_value["url"], status=500) + + resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud") + assert resp.status == 500 + content = await resp.content.read() + assert "Failed to download backup" in content.decode() + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_download_not_found( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test agent download backup raises error if not found.""" + client = await hass_client() + backup_id = "1234" + + resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=cloud.cloud") + assert resp.status == 404 + assert await resp.content.read() == b"" + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_upload( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + caplog: pytest.LogCaptureFixture, + aioclient_mock: AiohttpClientMocker, + mock_get_upload_details: Mock, +) -> None: + """Test agent upload backup.""" + client = await hass_client() + backup_id = "test-backup" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=True, + size=0.0, + ) + aioclient_mock.put(mock_get_upload_details.return_value["url"]) + + with ( + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + 
"homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("pathlib.Path.open") as mocked_open, + ): + mocked_open.return_value.read = Mock(side_effect=[b"test", b""]) + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=cloud.cloud", + data={"file": StringIO("test")}, + ) + + assert len(aioclient_mock.mock_calls) == 1 + assert aioclient_mock.mock_calls[-1][0] == "PUT" + assert aioclient_mock.mock_calls[-1][1] == URL( + mock_get_upload_details.return_value["url"] + ) + assert isinstance(aioclient_mock.mock_calls[-1][2], AsyncIterator) + + assert resp.status == 201 + assert f"Uploading backup {backup_id}" in caplog.text + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_upload_fail_put( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + caplog: pytest.LogCaptureFixture, + aioclient_mock: AiohttpClientMocker, + mock_get_upload_details: Mock, +) -> None: + """Test agent upload backup fails.""" + client = await hass_client() + backup_id = "test-backup" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=True, + size=0.0, + ) + aioclient_mock.put(mock_get_upload_details.return_value["url"], status=500) + + with ( + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("pathlib.Path.open") as mocked_open, + ): + mocked_open.return_value.read = Mock(side_effect=[b"test", b""]) + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=cloud.cloud", + data={"file": StringIO("test")}, + 
) + + assert resp.status == 201 + assert "Error during backup upload - Failed to upload backup" in caplog.text + + +@pytest.mark.parametrize("side_effect", [ClientError, CloudError]) +@pytest.mark.usefixtures("cloud_logged_in") +async def test_agents_upload_fail_cloud( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_get_upload_details: Mock, + side_effect: Exception, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test agent upload backup, when cloud user is logged in.""" + client = await hass_client() + backup_id = "test-backup" + mock_get_upload_details.side_effect = side_effect + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=True, + size=0.0, + ) + with ( + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("pathlib.Path.open") as mocked_open, + ): + mocked_open.return_value.read = Mock(side_effect=[b"test", b""]) + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=cloud.cloud", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + assert "Error during backup upload - Failed to get upload details" in caplog.text + + +async def test_agents_upload_not_protected( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test agent upload backup, when cloud user is logged in.""" + client = await hass_client() + backup_id = "test-backup" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + 
date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=False, + size=0.0, + ) + with ( + patch("pathlib.Path.open"), + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + ): + resp = await client.post( + "/api/backup/upload?agent_id=cloud.cloud", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + assert "Error during backup upload - Cloud backups must be protected" in caplog.text + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_delete( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + mock_delete_file: Mock, +) -> None: + """Test agent delete backup.""" + client = await hass_ws_client(hass) + backup_id = "23e64aec" + + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == {"agent_errors": {}} + mock_delete_file.assert_called_once() + + +@pytest.mark.parametrize("side_effect", [ClientError, CloudError]) +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_delete_fail_cloud( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + mock_delete_file: Mock, + side_effect: Exception, +) -> None: + """Test agent delete backup.""" + client = await hass_ws_client(hass) + backup_id = "23e64aec" + mock_delete_file.side_effect = side_effect + + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agent_errors": {"cloud.cloud": "Failed to delete backup"} + } + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_delete_not_found( + hass: HomeAssistant, + hass_ws_client: 
WebSocketGenerator, +) -> None: + """Test agent download backup raises error if not found.""" + client = await hass_ws_client(hass) + backup_id = "1234" + + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == {"agent_errors": {"cloud.cloud": "Backup not found"}} diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 97b1d337e82..71c3b14050d 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -533,6 +533,10 @@ def supervisor_client() -> Generator[AsyncMock]: "homeassistant.components.hassio.addon_manager.get_supervisor_client", return_value=supervisor_client, ), + patch( + "homeassistant.components.hassio.backup.get_supervisor_client", + return_value=supervisor_client, + ), patch( "homeassistant.components.hassio.discovery.get_supervisor_client", return_value=supervisor_client, diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py new file mode 100644 index 00000000000..660753bd815 --- /dev/null +++ b/tests/components/hassio/test_backup.py @@ -0,0 +1,403 @@ +"""Test supervisor backup functionality.""" + +from collections.abc import AsyncGenerator, Generator +from datetime import datetime +from io import StringIO +import os +from typing import Any +from unittest.mock import AsyncMock, patch + +from aiohasupervisor.models import backups as supervisor_backups +import pytest + +from homeassistant.components.backup import ( + DOMAIN as BACKUP_DOMAIN, + AddonInfo, + AgentBackup, + Folder, +) +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from .test_init import MOCK_ENVIRON + +from tests.typing import ClientSessionGenerator, WebSocketGenerator + +TEST_BACKUP = supervisor_backups.Backup( + compressed=False, + content=supervisor_backups.BackupContent( + addons=["ssl"], + 
folders=["share"], + homeassistant=True, + ), + date=datetime.fromisoformat("1970-01-01T00:00:00Z"), + location=None, + locations={None}, + name="Test", + protected=False, + size=1.0, + size_bytes=1048576, + slug="abc123", + type=supervisor_backups.BackupType.PARTIAL, +) +TEST_BACKUP_DETAILS = supervisor_backups.BackupComplete( + addons=[ + supervisor_backups.BackupAddon( + name="Terminal & SSH", + size=0.0, + slug="core_ssh", + version="9.14.0", + ) + ], + compressed=TEST_BACKUP.compressed, + date=TEST_BACKUP.date, + extra=None, + folders=["share"], + homeassistant_exclude_database=False, + homeassistant="2024.12.0", + location=TEST_BACKUP.location, + locations=TEST_BACKUP.locations, + name=TEST_BACKUP.name, + protected=TEST_BACKUP.protected, + repositories=[], + size=TEST_BACKUP.size, + size_bytes=TEST_BACKUP.size_bytes, + slug=TEST_BACKUP.slug, + supervisor_version="2024.11.2", + type=TEST_BACKUP.type, +) + + +@pytest.fixture(autouse=True) +def fixture_supervisor_environ() -> Generator[None]: + """Mock os environ for supervisor.""" + with patch.dict(os.environ, MOCK_ENVIRON): + yield + + +@pytest.fixture(autouse=True) +async def setup_integration( + hass: HomeAssistant, supervisor_client: AsyncMock +) -> AsyncGenerator[None]: + """Set up Backup integration.""" + with ( + patch("homeassistant.components.backup.is_hassio", return_value=True), + patch("homeassistant.components.backup.backup.is_hassio", return_value=True), + ): + assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}}) + await hass.async_block_till_done() + yield + + +@pytest.mark.usefixtures("hassio_client") +async def test_agent_info( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test backup agent info.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agents": [{"agent_id": 
"hassio.local"}], + } + + +@pytest.mark.usefixtures("hassio_client") +async def test_agent_list_backups( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test agent list backups.""" + client = await hass_ws_client(hass) + supervisor_client.backups.list.return_value = [TEST_BACKUP] + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + + await client.send_json_auto_id({"type": "backup/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"]["backups"] == [ + { + "addons": [ + {"name": "Terminal & SSH", "slug": "core_ssh", "version": "9.14.0"} + ], + "agent_ids": ["hassio.local"], + "backup_id": "abc123", + "database_included": True, + "date": "1970-01-01T00:00:00+00:00", + "failed_agent_ids": [], + "folders": ["share"], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0", + "name": "Test", + "protected": False, + "size": 1048576, + "with_strategy_settings": False, + } + ] + + +@pytest.mark.usefixtures("hassio_client") +async def test_agent_download( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test agent download backup, when cloud user is logged in.""" + client = await hass_client() + backup_id = "abc123" + supervisor_client.backups.list.return_value = [TEST_BACKUP] + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + supervisor_client.backups.download_backup.return_value.__aiter__.return_value = ( + iter((b"backup data",)) + ) + + resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=hassio.local") + assert resp.status == 200 + assert await resp.content.read() == b"backup data" + + +@pytest.mark.usefixtures("hassio_client") +async def test_agent_upload( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test agent upload backup.""" + client = await 
hass_client() + backup_id = "test-backup" + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=False, + size=0.0, + ) + + supervisor_client.backups.reload.assert_not_called() + with ( + patch("pathlib.Path.mkdir"), + patch("pathlib.Path.open"), + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("shutil.copy"), + ): + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=hassio.local", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + supervisor_client.backups.reload.assert_not_called() + + +@pytest.mark.usefixtures("hassio_client") +async def test_agent_delete_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test agent delete backup.""" + client = await hass_ws_client(hass) + backup_id = "abc123" + + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == {"agent_errors": {}} + supervisor_client.backups.remove_backup.assert_called_once_with(backup_id) + + +@pytest.mark.usefixtures("hassio_client") +async def test_reader_writer_create( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test generating a backup.""" + client = await hass_ws_client(hass) + supervisor_client.backups.partial_backup.return_value.job_id = "abc123" + 
supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + response = await client.receive_json() + assert response["success"] + + await client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": ["hassio.local"], "name": "Test"} + ) + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "in_progress", + } + + response = await client.receive_json() + assert response["success"] + assert response["result"] == {"backup_job_id": "abc123"} + + supervisor_client.backups.partial_backup.assert_called_once_with( + supervisor_backups.PartialBackupOptions( + addons=None, + background=True, + compressed=True, + folders=None, + homeassistant_exclude_database=False, + homeassistant=True, + location={None}, + name="Test", + password=None, + ) + ) + + await client.send_json_auto_id( + { + "type": "supervisor/event", + "data": { + "event": "job", + "data": {"done": True, "uuid": "abc123", "reference": "test_slug"}, + }, + } + ) + response = await client.receive_json() + assert response["success"] + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": "upload_to_agents", + "state": "in_progress", + } + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "completed", + } + + +@pytest.mark.usefixtures("hassio_client") +async def test_reader_writer_restore( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test restoring a backup.""" + client = await hass_ws_client(hass) + supervisor_client.backups.partial_restore.return_value.job_id = "abc123" + supervisor_client.backups.list.return_value = [TEST_BACKUP] 
+ supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + response = await client.receive_json() + assert response["success"] + + await client.send_json_auto_id( + {"type": "backup/restore", "agent_id": "hassio.local", "backup_id": "abc123"} + ) + response = await client.receive_json() + assert response["event"] == { + "manager_state": "restore_backup", + "stage": None, + "state": "in_progress", + } + + supervisor_client.backups.partial_restore.assert_called_once_with( + "abc123", + supervisor_backups.PartialRestoreOptions( + addons=None, + background=True, + folders=None, + homeassistant=True, + password=None, + ), + ) + + await client.send_json_auto_id( + { + "type": "supervisor/event", + "data": { + "event": "job", + "data": {"done": True, "uuid": "abc123"}, + }, + } + ) + response = await client.receive_json() + assert response["success"] + + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + + response = await client.receive_json() + assert response["success"] + assert response["result"] is None + + +@pytest.mark.parametrize( + ("parameters", "expected_error"), + [ + ( + {"restore_database": False}, + "Cannot restore Home Assistant without database", + ), + ( + {"restore_homeassistant": False}, + "Cannot restore database without Home Assistant", + ), + ], +) +@pytest.mark.usefixtures("hassio_client") +async def test_reader_writer_restore_wrong_parameters( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + parameters: dict[str, Any], + expected_error: str, +) -> None: + """Test trigger restore.""" + client = await hass_ws_client(hass) + supervisor_client.backups.list.return_value = [TEST_BACKUP] + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + + 
default_parameters = { + "type": "backup/restore", + "agent_id": "hassio.local", + "backup_id": "abc123", + } + + await client.send_json_auto_id(default_parameters | parameters) + response = await client.receive_json() + assert not response["success"] + assert response["error"] == { + "code": "home_assistant_error", + "message": expected_error, + } diff --git a/tests/components/kitchen_sink/test_backup.py b/tests/components/kitchen_sink/test_backup.py new file mode 100644 index 00000000000..7db03b7fa46 --- /dev/null +++ b/tests/components/kitchen_sink/test_backup.py @@ -0,0 +1,194 @@ +"""Test the Kitchen Sink backup platform.""" + +from collections.abc import AsyncGenerator +from io import StringIO +from unittest.mock import patch + +import pytest + +from homeassistant.components.backup import ( + DOMAIN as BACKUP_DOMAIN, + AddonInfo, + AgentBackup, + Folder, +) +from homeassistant.components.kitchen_sink import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.typing import ClientSessionGenerator, WebSocketGenerator + + +@pytest.fixture(autouse=True) +async def backup_only() -> AsyncGenerator[None]: + """Enable only the backup platform. + + The backup platform is not an entity platform. 
+ """ + with patch( + "homeassistant.components.kitchen_sink.COMPONENTS_WITH_DEMO_PLATFORM", + [], + ): + yield + + +@pytest.fixture(autouse=True) +async def setup_integration(hass: HomeAssistant) -> AsyncGenerator[None]: + """Set up Kitchen Sink integration.""" + with patch("homeassistant.components.backup.is_hassio", return_value=False): + assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}}) + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + await hass.async_block_till_done() + yield + + +async def test_agents_info( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test backup agent info.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agents": [{"agent_id": "backup.local"}, {"agent_id": "kitchen_sink.syncer"}], + } + + +async def test_agents_list_backups( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test agent list backups.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "backup/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"]["backups"] == [ + { + "addons": [{"name": "Test", "slug": "test", "version": "1.0.0"}], + "agent_ids": ["kitchen_sink.syncer"], + "backup_id": "abc123", + "database_included": False, + "date": "1970-01-01T00:00:00Z", + "failed_agent_ids": [], + "folders": ["media", "share"], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0", + "name": "Kitchen sink syncer", + "protected": False, + "size": 1234, + "with_strategy_settings": False, + } + ] + + +async def test_agents_download( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test downloading a backup.""" + client = await hass_client() + + resp = await 
client.get("/api/backup/download/abc123?agent_id=kitchen_sink.syncer") + assert resp.status == 200 + assert await resp.content.read() == b"backup data" + + +async def test_agents_upload( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, + hass_supervisor_access_token: str, +) -> None: + """Test agent upload backup.""" + ws_client = await hass_ws_client(hass, hass_supervisor_access_token) + client = await hass_client() + backup_id = "test-backup" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=False, + size=0.0, + ) + + with ( + patch("pathlib.Path.open"), + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + ): + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=kitchen_sink.syncer", + data={"file": StringIO("test")}, + ) + + assert resp.status == 201 + assert f"Uploading backup {backup_id}" in caplog.text + + await ws_client.send_json_auto_id({"type": "backup/info"}) + response = await ws_client.receive_json() + + assert response["success"] + backup_list = response["result"]["backups"] + assert len(backup_list) == 2 + assert backup_list[1] == { + "addons": [{"name": "Test", "slug": "test", "version": "1.0.0"}], + "agent_ids": ["kitchen_sink.syncer"], + "backup_id": "test-backup", + "database_included": True, + "date": "1970-01-01T00:00:00.000Z", + "failed_agent_ids": [], + "folders": ["media", "share"], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0", + "name": "Test", + "protected": False, + 
"size": 0.0, + "with_strategy_settings": False, + } + + +async def test_agent_delete_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test agent delete backup.""" + client = await hass_ws_client(hass) + backup_id = "abc123" + + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response["success"] + assert f"Deleted backup {backup_id}" in caplog.text + + await client.send_json_auto_id({"type": "backup/info"}) + response = await client.receive_json() + + assert response["success"] + backup_list = response["result"]["backups"] + assert not backup_list diff --git a/tests/test_backup_restore.py b/tests/test_backup_restore.py index 44a05c0540e..bce5eca4292 100644 --- a/tests/test_backup_restore.py +++ b/tests/test_backup_restore.py @@ -19,7 +19,29 @@ from .common import get_test_config_dir ( None, '{"path": "test"}', - backup_restore.RestoreBackupFileContent(backup_file_path=Path("test")), + None, + ), + ( + None, + '{"path": "test", "password": "psw", "remove_after_restore": false, "restore_database": false, "restore_homeassistant": true}', + backup_restore.RestoreBackupFileContent( + backup_file_path=Path("test"), + password="psw", + remove_after_restore=False, + restore_database=False, + restore_homeassistant=True, + ), + ), + ( + None, + '{"path": "test", "password": null, "remove_after_restore": true, "restore_database": true, "restore_homeassistant": false}', + backup_restore.RestoreBackupFileContent( + backup_file_path=Path("test"), + password=None, + remove_after_restore=True, + restore_database=True, + restore_homeassistant=False, + ), ), ], ) @@ -49,7 +71,11 @@ def test_restoring_backup_that_does_not_exist() -> None: mock.patch( "homeassistant.backup_restore.restore_backup_file_content", return_value=backup_restore.RestoreBackupFileContent( - backup_file_path=backup_file_path + 
backup_file_path=backup_file_path, + password=None, + remove_after_restore=False, + restore_database=True, + restore_homeassistant=True, ), ), mock.patch("pathlib.Path.read_text", side_effect=FileNotFoundError), @@ -78,7 +104,11 @@ def test_restoring_backup_that_is_not_a_file() -> None: mock.patch( "homeassistant.backup_restore.restore_backup_file_content", return_value=backup_restore.RestoreBackupFileContent( - backup_file_path=backup_file_path + backup_file_path=backup_file_path, + password=None, + remove_after_restore=False, + restore_database=True, + restore_homeassistant=True, ), ), mock.patch("pathlib.Path.exists", return_value=True), @@ -102,7 +132,11 @@ def test_aborting_for_older_versions() -> None: mock.patch( "homeassistant.backup_restore.restore_backup_file_content", return_value=backup_restore.RestoreBackupFileContent( - backup_file_path=backup_file_path + backup_file_path=backup_file_path, + password=None, + remove_after_restore=False, + restore_database=True, + restore_homeassistant=True, ), ), mock.patch("securetar.SecureTarFile"), @@ -117,14 +151,78 @@ def test_aborting_for_older_versions() -> None: assert backup_restore.restore_backup(config_dir) is True -def test_removal_of_current_configuration_when_restoring() -> None: +@pytest.mark.parametrize( + ( + "restore_backup_content", + "expected_removed_files", + "expected_removed_directories", + "expected_copied_files", + "expected_copied_trees", + ), + [ + ( + backup_restore.RestoreBackupFileContent( + backup_file_path=None, + password=None, + remove_after_restore=False, + restore_database=True, + restore_homeassistant=True, + ), + ( + ".HA_RESTORE", + ".HA_VERSION", + "home-assistant_v2.db", + "home-assistant_v2.db-wal", + ), + ("tmp_backups", "www"), + (), + ("data",), + ), + ( + backup_restore.RestoreBackupFileContent( + backup_file_path=None, + password=None, + restore_database=False, + remove_after_restore=False, + restore_homeassistant=True, + ), + (".HA_RESTORE", ".HA_VERSION"), + 
("tmp_backups", "www"), + (), + ("data",), + ), + ( + backup_restore.RestoreBackupFileContent( + backup_file_path=None, + password=None, + restore_database=True, + remove_after_restore=False, + restore_homeassistant=False, + ), + ("home-assistant_v2.db", "home-assistant_v2.db-wal"), + (), + ("home-assistant_v2.db", "home-assistant_v2.db-wal"), + (), + ), + ], +) +def test_removal_of_current_configuration_when_restoring( + restore_backup_content: backup_restore.RestoreBackupFileContent, + expected_removed_files: tuple[str, ...], + expected_removed_directories: tuple[str, ...], + expected_copied_files: tuple[str, ...], + expected_copied_trees: tuple[str, ...], +) -> None: """Test that we are removing the current configuration directory.""" config_dir = Path(get_test_config_dir()) - backup_file_path = Path(config_dir, "backups", "test.tar") + restore_backup_content.backup_file_path = Path(config_dir, "backups", "test.tar") mock_config_dir = [ {"path": Path(config_dir, ".HA_RESTORE"), "is_file": True}, {"path": Path(config_dir, ".HA_VERSION"), "is_file": True}, + {"path": Path(config_dir, "home-assistant_v2.db"), "is_file": True}, + {"path": Path(config_dir, "home-assistant_v2.db-wal"), "is_file": True}, {"path": Path(config_dir, "backups"), "is_file": False}, + {"path": Path(config_dir, "tmp_backups"), "is_file": False}, {"path": Path(config_dir, "www"), "is_file": False}, ] @@ -140,12 +238,10 @@ def test_removal_of_current_configuration_when_restoring() -> None: with ( mock.patch( "homeassistant.backup_restore.restore_backup_file_content", - return_value=backup_restore.RestoreBackupFileContent( - backup_file_path=backup_file_path - ), + return_value=restore_backup_content, ), mock.patch("securetar.SecureTarFile"), - mock.patch("homeassistant.backup_restore.TemporaryDirectory"), + mock.patch("homeassistant.backup_restore.TemporaryDirectory") as temp_dir_mock, mock.patch("homeassistant.backup_restore.HA_VERSION", "2013.09.17"), mock.patch("pathlib.Path.read_text", 
_patched_path_read_text), mock.patch("pathlib.Path.is_file", _patched_path_is_file), @@ -154,17 +250,33 @@ def test_removal_of_current_configuration_when_restoring() -> None: "pathlib.Path.iterdir", return_value=[x["path"] for x in mock_config_dir], ), - mock.patch("pathlib.Path.unlink") as unlink_mock, - mock.patch("shutil.rmtree") as rmtreemock, + mock.patch("pathlib.Path.unlink", autospec=True) as unlink_mock, + mock.patch("shutil.copy") as copy_mock, + mock.patch("shutil.copytree") as copytree_mock, + mock.patch("shutil.rmtree") as rmtree_mock, ): - assert backup_restore.restore_backup(config_dir) is True - assert unlink_mock.call_count == 2 - assert ( - rmtreemock.call_count == 1 - ) # We have 2 directories in the config directory, but backups is kept + temp_dir_mock.return_value.__enter__.return_value = "tmp" - removed_directories = {Path(call.args[0]) for call in rmtreemock.mock_calls} - assert removed_directories == {Path(config_dir, "www")} + assert backup_restore.restore_backup(config_dir) is True + + tmp_ha = Path("tmp", "homeassistant") + assert copy_mock.call_count == len(expected_copied_files) + copied_files = {Path(call.args[0]) for call in copy_mock.mock_calls} + assert copied_files == {Path(tmp_ha, "data", f) for f in expected_copied_files} + + assert copytree_mock.call_count == len(expected_copied_trees) + copied_trees = {Path(call.args[0]) for call in copytree_mock.mock_calls} + assert copied_trees == {Path(tmp_ha, t) for t in expected_copied_trees} + + assert unlink_mock.call_count == len(expected_removed_files) + removed_files = {Path(call.args[0]) for call in unlink_mock.mock_calls} + assert removed_files == {Path(config_dir, f) for f in expected_removed_files} + + assert rmtree_mock.call_count == len(expected_removed_directories) + removed_directories = {Path(call.args[0]) for call in rmtree_mock.mock_calls} + assert removed_directories == { + Path(config_dir, d) for d in expected_removed_directories + } def 
test_extracting_the_contents_of_a_backup_file() -> None: @@ -177,8 +289,8 @@ def test_extracting_the_contents_of_a_backup_file() -> None: getmembers_mock = mock.MagicMock( return_value=[ + tarfile.TarInfo(name="../data/test"), tarfile.TarInfo(name="data"), - tarfile.TarInfo(name="data/../test"), tarfile.TarInfo(name="data/.HA_VERSION"), tarfile.TarInfo(name="data/.storage"), tarfile.TarInfo(name="data/www"), @@ -190,7 +302,11 @@ def test_extracting_the_contents_of_a_backup_file() -> None: mock.patch( "homeassistant.backup_restore.restore_backup_file_content", return_value=backup_restore.RestoreBackupFileContent( - backup_file_path=backup_file_path + backup_file_path=backup_file_path, + password=None, + remove_after_restore=False, + restore_database=True, + restore_homeassistant=True, ), ), mock.patch( @@ -205,11 +321,59 @@ def test_extracting_the_contents_of_a_backup_file() -> None: mock.patch("pathlib.Path.read_text", _patched_path_read_text), mock.patch("pathlib.Path.is_file", return_value=False), mock.patch("pathlib.Path.iterdir", return_value=[]), + mock.patch("shutil.copytree"), ): assert backup_restore.restore_backup(config_dir) is True - assert getmembers_mock.call_count == 1 assert extractall_mock.call_count == 2 assert { member.name for member in extractall_mock.mock_calls[-1].kwargs["members"] - } == {".HA_VERSION", ".storage", "www"} + } == {"data", "data/.HA_VERSION", "data/.storage", "data/www"} + + +@pytest.mark.parametrize( + ("remove_after_restore", "unlink_calls"), [(True, 1), (False, 0)] +) +def test_remove_backup_file_after_restore( + remove_after_restore: bool, unlink_calls: int +) -> None: + """Test removing a backup file after restore.""" + config_dir = Path(get_test_config_dir()) + backup_file_path = Path(config_dir, "backups", "test.tar") + + with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=backup_restore.RestoreBackupFileContent( + backup_file_path=backup_file_path, + password=None, + 
remove_after_restore=remove_after_restore, + restore_database=True, + restore_homeassistant=True, + ), + ), + mock.patch("homeassistant.backup_restore._extract_backup"), + mock.patch("pathlib.Path.unlink", autospec=True) as mock_unlink, + ): + assert backup_restore.restore_backup(config_dir) is True + assert mock_unlink.call_count == unlink_calls + for call in mock_unlink.mock_calls: + assert call.args[0] == backup_file_path + + +@pytest.mark.parametrize( + ("password", "expected"), + [ + ("test", b"\xf0\x9b\xb9\x1f\xdc,\xff\xd5x\xd6\xd6\x8fz\x19.\x0f"), + ("lorem ipsum...", b"#\xe0\xfc\xe0\xdb?_\x1f,$\rQ\xf4\xf5\xd8\xfb"), + ], +) +def test_pw_to_key(password: str | None, expected: bytes | None) -> None: + """Test password to key conversion.""" + assert backup_restore.password_to_key(password) == expected + + +def test_pw_to_key_none() -> None: + """Test password to key conversion.""" + with pytest.raises(AttributeError): + backup_restore.password_to_key(None) From 4c5965ffc9fb028000e176202b9f5d43510da6a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Wed, 11 Dec 2024 22:47:14 +0100 Subject: [PATCH 089/677] Add reconfiguration flow to myuplink (#132970) * Add reconfiguration flow * Tick reconfiguration-flow rule --- .../components/myuplink/config_flow.py | 17 +++- .../components/myuplink/quality_scale.yaml | 2 +- .../components/myuplink/strings.json | 1 + tests/components/myuplink/test_config_flow.py | 93 +++++++++++++++++++ 4 files changed, 111 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/myuplink/config_flow.py b/homeassistant/components/myuplink/config_flow.py index 15bff643185..cf0428f59ce 100644 --- a/homeassistant/components/myuplink/config_flow.py +++ b/homeassistant/components/myuplink/config_flow.py @@ -6,7 +6,11 @@ from typing import Any import jwt -from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult +from homeassistant.config_entries import ( + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + 
ConfigFlowResult, +) from homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN, OAUTH2_SCOPES @@ -48,6 +52,12 @@ class OAuth2FlowHandler( return await self.async_step_user() + async def async_step_reconfigure( + self, user_input: Mapping[str, Any] | None = None + ) -> ConfigFlowResult: + """User initiated reconfiguration.""" + return await self.async_step_user() + async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult: """Create or update the config entry.""" @@ -62,5 +72,10 @@ class OAuth2FlowHandler( return self.async_update_reload_and_abort( self._get_reauth_entry(), data=data ) + if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch(reason="account_mismatch") + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), data=data + ) self._abort_if_unique_id_configured() return await super().async_oauth_create_entry(data) diff --git a/homeassistant/components/myuplink/quality_scale.yaml b/homeassistant/components/myuplink/quality_scale.yaml index 661986a2f71..463002b5519 100644 --- a/homeassistant/components/myuplink/quality_scale.yaml +++ b/homeassistant/components/myuplink/quality_scale.yaml @@ -82,7 +82,7 @@ rules: status: todo comment: PR pending review \#191937 icon-translations: done - reconfiguration-flow: todo + reconfiguration-flow: done repair-issues: status: exempt comment: | diff --git a/homeassistant/components/myuplink/strings.json b/homeassistant/components/myuplink/strings.json index bd60a3c7bb3..d3d2f198448 100644 --- a/homeassistant/components/myuplink/strings.json +++ b/homeassistant/components/myuplink/strings.json @@ -23,6 +23,7 @@ "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", 
"account_mismatch": "The used account does not match the original account", "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]" }, diff --git a/tests/components/myuplink/test_config_flow.py b/tests/components/myuplink/test_config_flow.py index e823402bda6..0b8d0dba17a 100644 --- a/tests/components/myuplink/test_config_flow.py +++ b/tests/components/myuplink/test_config_flow.py @@ -181,3 +181,96 @@ async def test_flow_reauth_abort( assert result.get("reason") == expected_reason assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + +@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.parametrize( + ("unique_id", "scope", "expected_reason"), + [ + ( + UNIQUE_ID, + CURRENT_SCOPE, + "reconfigure_successful", + ), + ( + "wrong_uid", + CURRENT_SCOPE, + "account_mismatch", + ), + ], + ids=["reauth_only", "account_mismatch"], +) +async def test_flow_reconfigure_abort( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + setup_credentials: None, + mock_config_entry: MockConfigEntry, + access_token: str, + expires_at: float, + unique_id: str, + scope: str, + expected_reason: str, +) -> None: + """Test reauth step with correct params and mismatches.""" + + CURRENT_TOKEN = { + "auth_implementation": DOMAIN, + "token": { + "access_token": access_token, + "scope": scope, + "expires_in": 86399, + "refresh_token": "3012bc9f-7a65-4240-b817-9154ffdcc30f", + "token_type": "Bearer", + "expires_at": expires_at, + }, + } + assert hass.config_entries.async_update_entry( + mock_config_entry, data=CURRENT_TOKEN, unique_id=unique_id + ) + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["step_id"] == "auth" + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": REDIRECT_URL, + }, + ) + assert result["url"] == ( + 
f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + f"&redirect_uri={REDIRECT_URL}" + f"&state={state}" + f"&scope={CURRENT_SCOPE.replace(' ', '+')}" + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "refresh_token": "updated-refresh-token", + "access_token": access_token, + "type": "Bearer", + "expires_in": "60", + "scope": CURRENT_SCOPE, + }, + ) + + with patch( + f"homeassistant.components.{DOMAIN}.async_setup_entry", return_value=True + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done() + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == expected_reason + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 From 95f48963d4d63fdb1a5e7c10c87ff694b50a7525 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Wed, 11 Dec 2024 23:11:11 +0100 Subject: [PATCH 090/677] Set strict typing for myuplink (#132972) Set strict typing --- homeassistant/components/myuplink/__init__.py | 6 ++++-- homeassistant/components/myuplink/quality_scale.yaml | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/myuplink/__init__.py b/homeassistant/components/myuplink/__init__.py index c3ff8b6988b..e833c5fcd8e 100644 --- a/homeassistant/components/myuplink/__init__.py +++ b/homeassistant/components/myuplink/__init__.py @@ -77,14 +77,16 @@ async def async_setup_entry( return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: MyUplinkConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) @callback def create_devices( - hass: HomeAssistant, 
config_entry: ConfigEntry, coordinator: MyUplinkDataCoordinator + hass: HomeAssistant, + config_entry: MyUplinkConfigEntry, + coordinator: MyUplinkDataCoordinator, ) -> None: """Update all devices.""" device_registry = dr.async_get(hass) diff --git a/homeassistant/components/myuplink/quality_scale.yaml b/homeassistant/components/myuplink/quality_scale.yaml index 463002b5519..ef64ce757f5 100644 --- a/homeassistant/components/myuplink/quality_scale.yaml +++ b/homeassistant/components/myuplink/quality_scale.yaml @@ -95,4 +95,4 @@ rules: # Platinum async-dependency: done inject-websession: done - strict-typing: todo + strict-typing: done From eea781f34a50d1ddab6b84ae3f5383104e65285c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 12 Dec 2024 05:46:31 +0100 Subject: [PATCH 091/677] Bump led-ble to 1.1.1 (#132977) changelog: https://github.com/Bluetooth-Devices/led-ble/compare/v1.0.2...v1.1.1 --- homeassistant/components/led_ble/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/led_ble/manifest.json b/homeassistant/components/led_ble/manifest.json index 1d12e355a0d..4aaaebc0006 100644 --- a/homeassistant/components/led_ble/manifest.json +++ b/homeassistant/components/led_ble/manifest.json @@ -35,5 +35,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/led_ble", "iot_class": "local_polling", - "requirements": ["bluetooth-data-tools==1.20.0", "led-ble==1.0.2"] + "requirements": ["bluetooth-data-tools==1.20.0", "led-ble==1.1.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 661ce5876a9..10b8c650127 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1278,7 +1278,7 @@ ld2410-ble==0.1.1 leaone-ble==0.1.0 # homeassistant.components.led_ble -led-ble==1.0.2 +led-ble==1.1.1 # homeassistant.components.lektrico lektricowifi==0.0.43 diff --git 
a/requirements_test_all.txt b/requirements_test_all.txt index c959d83723c..194e29e35e8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1074,7 +1074,7 @@ ld2410-ble==0.1.1 leaone-ble==0.1.0 # homeassistant.components.led_ble -led-ble==1.0.2 +led-ble==1.1.1 # homeassistant.components.lektrico lektricowifi==0.0.43 From b02ccd0813c5eb731ca9b3dceae19c8f69ca08c5 Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Thu, 12 Dec 2024 07:47:57 +0100 Subject: [PATCH 092/677] Add missing body height icon in Withings integration (#132991) Update icons.json --- homeassistant/components/withings/icons.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/homeassistant/components/withings/icons.json b/homeassistant/components/withings/icons.json index 79ff7489bf8..8123337dc82 100644 --- a/homeassistant/components/withings/icons.json +++ b/homeassistant/components/withings/icons.json @@ -16,6 +16,9 @@ "heart_pulse": { "default": "mdi:heart-pulse" }, + "height": { + "default": "mdi:human-male-height-variant" + }, "hydration": { "default": "mdi:water" }, From 7e071d1fc6b1fad0ebfbc28e58e039ceff93407a Mon Sep 17 00:00:00 2001 From: Tom Date: Thu, 12 Dec 2024 07:49:08 +0100 Subject: [PATCH 093/677] Introduce parallel updates for Plugwise (#132940) * Plugwise indicate parallel updates * Update homeassistant/components/plugwise/number.py Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --------- Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/components/plugwise/binary_sensor.py | 3 +++ homeassistant/components/plugwise/button.py | 2 ++ homeassistant/components/plugwise/climate.py | 2 ++ homeassistant/components/plugwise/number.py | 2 ++ homeassistant/components/plugwise/quality_scale.yaml | 4 +--- homeassistant/components/plugwise/select.py | 2 ++ homeassistant/components/plugwise/sensor.py | 3 +++ homeassistant/components/plugwise/switch.py | 2 ++ 8 files 
changed, 17 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/plugwise/binary_sensor.py b/homeassistant/components/plugwise/binary_sensor.py index f422d4facf3..539fa243d6c 100644 --- a/homeassistant/components/plugwise/binary_sensor.py +++ b/homeassistant/components/plugwise/binary_sensor.py @@ -23,6 +23,9 @@ from .entity import PlugwiseEntity SEVERITIES = ["other", "info", "warning", "error"] +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True) class PlugwiseBinarySensorEntityDescription(BinarySensorEntityDescription): diff --git a/homeassistant/components/plugwise/button.py b/homeassistant/components/plugwise/button.py index 078d31bea12..8a05ede3496 100644 --- a/homeassistant/components/plugwise/button.py +++ b/homeassistant/components/plugwise/button.py @@ -13,6 +13,8 @@ from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity from .util import plugwise_command +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/plugwise/climate.py b/homeassistant/components/plugwise/climate.py index fb0124e144d..3cf536eb445 100644 --- a/homeassistant/components/plugwise/climate.py +++ b/homeassistant/components/plugwise/climate.py @@ -24,6 +24,8 @@ from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity from .util import plugwise_command +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/plugwise/number.py b/homeassistant/components/plugwise/number.py index 833ea3ec761..1d0b1382c24 100644 --- a/homeassistant/components/plugwise/number.py +++ b/homeassistant/components/plugwise/number.py @@ -20,6 +20,8 @@ from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity from .util import plugwise_command +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class 
PlugwiseNumberEntityDescription(NumberEntityDescription): diff --git a/homeassistant/components/plugwise/quality_scale.yaml b/homeassistant/components/plugwise/quality_scale.yaml index a6b364cf381..ce0788c44f7 100644 --- a/homeassistant/components/plugwise/quality_scale.yaml +++ b/homeassistant/components/plugwise/quality_scale.yaml @@ -32,9 +32,7 @@ rules: reauthentication-flow: status: exempt comment: The hubs have a hardcoded `Smile ID` printed on the sticker used as password, it can not be changed - parallel-updates: - status: todo - comment: Using coordinator, but required due to mutable platform + parallel-updates: done test-coverage: done integration-owner: done docs-installation-parameters: diff --git a/homeassistant/components/plugwise/select.py b/homeassistant/components/plugwise/select.py index 46b27ca6225..ff268d8eded 100644 --- a/homeassistant/components/plugwise/select.py +++ b/homeassistant/components/plugwise/select.py @@ -15,6 +15,8 @@ from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity from .util import plugwise_command +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class PlugwiseSelectEntityDescription(SelectEntityDescription): diff --git a/homeassistant/components/plugwise/sensor.py b/homeassistant/components/plugwise/sensor.py index 41ca439451a..14b42682376 100644 --- a/homeassistant/components/plugwise/sensor.py +++ b/homeassistant/components/plugwise/sensor.py @@ -31,6 +31,9 @@ from . 
import PlugwiseConfigEntry from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True) class PlugwiseSensorEntityDescription(SensorEntityDescription): diff --git a/homeassistant/components/plugwise/switch.py b/homeassistant/components/plugwise/switch.py index 305518f4bef..ea6d6f18b7f 100644 --- a/homeassistant/components/plugwise/switch.py +++ b/homeassistant/components/plugwise/switch.py @@ -21,6 +21,8 @@ from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity from .util import plugwise_command +PARALLEL_UPDATES = 0 + @dataclass(frozen=True) class PlugwiseSwitchEntityDescription(SwitchEntityDescription): From e39897ff9a024b4f163e27c6a357e427ea2c7047 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Thu, 12 Dec 2024 01:55:29 -0500 Subject: [PATCH 094/677] Enforce strict typing for Russound RIO (#132982) --- .strict-typing | 1 + .../components/russound_rio/media_player.py | 14 +++++++------- .../components/russound_rio/quality_scale.yaml | 2 +- mypy.ini | 10 ++++++++++ 4 files changed, 19 insertions(+), 8 deletions(-) diff --git a/.strict-typing b/.strict-typing index a45be32c3c6..130ae6e9393 100644 --- a/.strict-typing +++ b/.strict-typing @@ -402,6 +402,7 @@ homeassistant.components.romy.* homeassistant.components.rpi_power.* homeassistant.components.rss_feed_template.* homeassistant.components.rtsp_to_webrtc.* +homeassistant.components.russound_rio.* homeassistant.components.ruuvi_gateway.* homeassistant.components.ruuvitag_ble.* homeassistant.components.samsungtv.* diff --git a/homeassistant/components/russound_rio/media_player.py b/homeassistant/components/russound_rio/media_player.py index 12b41485167..d0d8e02a282 100644 --- a/homeassistant/components/russound_rio/media_player.py +++ b/homeassistant/components/russound_rio/media_player.py @@ -148,37 
+148,37 @@ class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): return MediaPlayerState.ON @property - def source(self): + def source(self) -> str: """Get the currently selected source.""" return self._source.name @property - def source_list(self): + def source_list(self) -> list[str]: """Return a list of available input sources.""" return [x.name for x in self._sources.values()] @property - def media_title(self): + def media_title(self) -> str | None: """Title of current playing media.""" return self._source.song_name @property - def media_artist(self): + def media_artist(self) -> str | None: """Artist of current playing media, music track only.""" return self._source.artist_name @property - def media_album_name(self): + def media_album_name(self) -> str | None: """Album name of current playing media, music track only.""" return self._source.album_name @property - def media_image_url(self): + def media_image_url(self) -> str | None: """Image url of current playing media.""" return self._source.cover_art_url @property - def volume_level(self): + def volume_level(self) -> float: """Volume level of the media player (0..1). Value is returned based on a range (0..50). diff --git a/homeassistant/components/russound_rio/quality_scale.yaml b/homeassistant/components/russound_rio/quality_scale.yaml index 4c7214cfd8b..aaa354b2b31 100644 --- a/homeassistant/components/russound_rio/quality_scale.yaml +++ b/homeassistant/components/russound_rio/quality_scale.yaml @@ -83,4 +83,4 @@ rules: status: exempt comment: | This integration uses telnet exclusively and does not make http calls. 
- strict-typing: todo + strict-typing: done diff --git a/mypy.ini b/mypy.ini index 4e5d4212ee9..a0c441c44f9 100644 --- a/mypy.ini +++ b/mypy.ini @@ -3775,6 +3775,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.russound_rio.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.ruuvi_gateway.*] check_untyped_defs = true disallow_incomplete_defs = true From 2d0c4e4a591737a18696fe74740027aa6dcce161 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Thu, 12 Dec 2024 01:56:29 -0500 Subject: [PATCH 095/677] Improve config flow test coverage for Russound RIO (#132981) --- .../russound_rio/quality_scale.yaml | 5 +--- tests/components/russound_rio/__init__.py | 12 ++++++++ .../russound_rio/test_config_flow.py | 29 +++++++++++++++++++ tests/components/russound_rio/test_init.py | 26 +++++++++++++++-- .../russound_rio/test_media_player.py | 10 ++----- 5 files changed, 68 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/russound_rio/quality_scale.yaml b/homeassistant/components/russound_rio/quality_scale.yaml index aaa354b2b31..2d396892aa8 100644 --- a/homeassistant/components/russound_rio/quality_scale.yaml +++ b/homeassistant/components/russound_rio/quality_scale.yaml @@ -10,10 +10,7 @@ rules: This integration uses a push API. No polling required. brands: done common-modules: done - config-flow-test-coverage: - status: todo - comment: | - Missing unique_id check in test_form() and test_import(). Test for adding same device twice missing. 
+ config-flow-test-coverage: done config-flow: done dependency-transparency: done docs-actions: diff --git a/tests/components/russound_rio/__init__.py b/tests/components/russound_rio/__init__.py index d0e6d77f1ee..d8764285dd3 100644 --- a/tests/components/russound_rio/__init__.py +++ b/tests/components/russound_rio/__init__.py @@ -1,5 +1,9 @@ """Tests for the Russound RIO integration.""" +from unittest.mock import AsyncMock + +from aiorussound.models import CallbackType + from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -11,3 +15,11 @@ async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() + + +async def mock_state_update( + client: AsyncMock, callback_type: CallbackType = CallbackType.STATE +) -> None: + """Trigger a callback in the media player.""" + for callback in client.register_state_update_callbacks.call_args_list: + await callback[0][0](client, callback_type) diff --git a/tests/components/russound_rio/test_config_flow.py b/tests/components/russound_rio/test_config_flow.py index cf754852731..28cbf7eda5e 100644 --- a/tests/components/russound_rio/test_config_flow.py +++ b/tests/components/russound_rio/test_config_flow.py @@ -9,6 +9,8 @@ from homeassistant.data_entry_flow import FlowResultType from .const import MOCK_CONFIG, MODEL +from tests.common import MockConfigEntry + async def test_form( hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound_client: AsyncMock @@ -29,6 +31,7 @@ async def test_form( assert result["title"] == MODEL assert result["data"] == MOCK_CONFIG assert len(mock_setup_entry.mock_calls) == 1 + assert result["result"].unique_id == "00:11:22:33:44:55" async def test_form_cannot_connect( @@ -60,6 +63,31 @@ async def test_form_cannot_connect( assert len(mock_setup_entry.mock_calls) == 1 +async def test_duplicate( + hass: HomeAssistant, + mock_russound_client: AsyncMock, + 
mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test duplicate flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_CONFIG, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + async def test_import( hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound_client: AsyncMock ) -> None: @@ -74,6 +102,7 @@ async def test_import( assert result["title"] == MODEL assert result["data"] == MOCK_CONFIG assert len(mock_setup_entry.mock_calls) == 1 + assert result["result"].unique_id == "00:11:22:33:44:55" async def test_import_cannot_connect( diff --git a/tests/components/russound_rio/test_init.py b/tests/components/russound_rio/test_init.py index 6787ee37c79..e7022fa6ac1 100644 --- a/tests/components/russound_rio/test_init.py +++ b/tests/components/russound_rio/test_init.py @@ -1,7 +1,9 @@ """Tests for the Russound RIO integration.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, Mock +from aiorussound.models import CallbackType +import pytest from syrupy import SnapshotAssertion from homeassistant.components.russound_rio.const import DOMAIN @@ -9,7 +11,7 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from . import setup_integration +from . 
import mock_state_update, setup_integration from tests.common import MockConfigEntry @@ -42,3 +44,23 @@ async def test_device_info( ) assert device_entry is not None assert device_entry == snapshot + + +async def test_disconnect_reconnect_log( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_russound_client: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test device registry integration.""" + await setup_integration(hass, mock_config_entry) + + mock_russound_client.is_connected = Mock(return_value=False) + await mock_state_update(mock_russound_client, CallbackType.CONNECTION) + assert "Disconnected from device at 127.0.0.1" in caplog.text + + mock_russound_client.is_connected = Mock(return_value=True) + await mock_state_update(mock_russound_client, CallbackType.CONNECTION) + assert "Reconnected to device at 127.0.0.1" in caplog.text diff --git a/tests/components/russound_rio/test_media_player.py b/tests/components/russound_rio/test_media_player.py index e720e2c7f65..c740ec4f39e 100644 --- a/tests/components/russound_rio/test_media_player.py +++ b/tests/components/russound_rio/test_media_player.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from aiorussound.models import CallbackType, PlayStatus +from aiorussound.models import PlayStatus import pytest from homeassistant.const import ( @@ -15,18 +15,12 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from . import setup_integration +from . 
import mock_state_update, setup_integration from .const import ENTITY_ID_ZONE_1 from tests.common import MockConfigEntry -async def mock_state_update(client: AsyncMock) -> None: - """Trigger a callback in the media player.""" - for callback in client.register_state_update_callbacks.call_args_list: - await callback[0][0](client, CallbackType.STATE) - - @pytest.mark.parametrize( ("zone_status", "source_play_status", "media_player_state"), [ From 0d4780e91b0bb92c255983e19b144f3352aa4b1c Mon Sep 17 00:00:00 2001 From: Chris Talkington Date: Thu, 12 Dec 2024 01:00:24 -0600 Subject: [PATCH 096/677] Set parallel updates for roku (#132892) * Set parallel updates for roku * Update sensor.py * Update media_player.py * Update remote.py * Update select.py * Update media_player.py * Update remote.py * Update select.py * Update remote.py * Update media_player.py --- homeassistant/components/roku/binary_sensor.py | 3 +++ homeassistant/components/roku/media_player.py | 3 ++- homeassistant/components/roku/remote.py | 2 ++ homeassistant/components/roku/select.py | 2 ++ homeassistant/components/roku/sensor.py | 3 +++ 5 files changed, 12 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/roku/binary_sensor.py b/homeassistant/components/roku/binary_sensor.py index cd51c30c250..2e7fd12788c 100644 --- a/homeassistant/components/roku/binary_sensor.py +++ b/homeassistant/components/roku/binary_sensor.py @@ -18,6 +18,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import RokuConfigEntry from .entity import RokuEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RokuBinarySensorEntityDescription(BinarySensorEntityDescription): diff --git a/homeassistant/components/roku/media_player.py b/homeassistant/components/roku/media_player.py index d43d62c9438..0c1f92521af 100644 --- a/homeassistant/components/roku/media_player.py +++ b/homeassistant/components/roku/media_player.py @@ -46,7 +46,6 @@ from .helpers import format_channel_name, roku_exception_handler _LOGGER = logging.getLogger(__name__) - STREAM_FORMAT_TO_MEDIA_TYPE = { "dash": MediaType.VIDEO, "hls": MediaType.VIDEO, @@ -80,6 +79,8 @@ ATTRS_TO_PLAY_ON_ROKU_AUDIO_PARAMS = { SEARCH_SCHEMA: VolDictType = {vol.Required(ATTR_KEYWORD): str} +PARALLEL_UPDATES = 1 + async def async_setup_entry( hass: HomeAssistant, entry: RokuConfigEntry, async_add_entities: AddEntitiesCallback diff --git a/homeassistant/components/roku/remote.py b/homeassistant/components/roku/remote.py index 9a31f9fd7a0..f7916fb23a2 100644 --- a/homeassistant/components/roku/remote.py +++ b/homeassistant/components/roku/remote.py @@ -13,6 +13,8 @@ from . import RokuConfigEntry from .entity import RokuEntity from .helpers import roku_exception_handler +PARALLEL_UPDATES = 1 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/roku/select.py b/homeassistant/components/roku/select.py index 6977f8c0d24..360d4e25415 100644 --- a/homeassistant/components/roku/select.py +++ b/homeassistant/components/roku/select.py @@ -16,6 +16,8 @@ from . 
import RokuConfigEntry from .entity import RokuEntity from .helpers import format_channel_name, roku_exception_handler +PARALLEL_UPDATES = 1 + def _get_application_name(device: RokuDevice) -> str | None: if device.app is None or device.app.name is None: diff --git a/homeassistant/components/roku/sensor.py b/homeassistant/components/roku/sensor.py index 56a84ead402..870386945a6 100644 --- a/homeassistant/components/roku/sensor.py +++ b/homeassistant/components/roku/sensor.py @@ -15,6 +15,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import RokuConfigEntry from .entity import RokuEntity +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RokuSensorEntityDescription(SensorEntityDescription): From 053f03ac58bc61b077910f13d486bef4a535be86 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Thu, 12 Dec 2024 02:03:05 -0600 Subject: [PATCH 097/677] Change warning to debug for VAD timeout (#132987) --- homeassistant/components/assist_pipeline/vad.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/assist_pipeline/vad.py b/homeassistant/components/assist_pipeline/vad.py index deae5b9b7b3..c7fe1bc10c7 100644 --- a/homeassistant/components/assist_pipeline/vad.py +++ b/homeassistant/components/assist_pipeline/vad.py @@ -140,7 +140,7 @@ class VoiceCommandSegmenter: self._timeout_seconds_left -= chunk_seconds if self._timeout_seconds_left <= 0: - _LOGGER.warning( + _LOGGER.debug( "VAD end of speech detection timed out after %s seconds", self.timeout_seconds, ) From 85d4c48d6f2120e4b99ae694407bdd77ee45d68c Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 09:53:26 +0100 Subject: [PATCH 098/677] Set parallel updates in Elgato (#132998) --- homeassistant/components/elgato/button.py | 2 ++ homeassistant/components/elgato/quality_scale.yaml | 5 +---- homeassistant/components/elgato/switch.py | 2 ++ 3 files changed, 5 
insertions(+), 4 deletions(-) diff --git a/homeassistant/components/elgato/button.py b/homeassistant/components/elgato/button.py index aefff0b750b..6f9436b8e29 100644 --- a/homeassistant/components/elgato/button.py +++ b/homeassistant/components/elgato/button.py @@ -22,6 +22,8 @@ from . import ElgatorConfigEntry from .coordinator import ElgatoDataUpdateCoordinator from .entity import ElgatoEntity +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class ElgatoButtonEntityDescription(ButtonEntityDescription): diff --git a/homeassistant/components/elgato/quality_scale.yaml b/homeassistant/components/elgato/quality_scale.yaml index 513940e2438..531f0447f70 100644 --- a/homeassistant/components/elgato/quality_scale.yaml +++ b/homeassistant/components/elgato/quality_scale.yaml @@ -30,10 +30,7 @@ rules: entity-unavailable: done integration-owner: done log-when-unavailable: done - parallel-updates: - status: todo - comment: | - Does not set parallel-updates on button/switch action calls. + parallel-updates: done reauthentication-flow: status: exempt comment: | diff --git a/homeassistant/components/elgato/switch.py b/homeassistant/components/elgato/switch.py index fe177616034..643f148ec7d 100644 --- a/homeassistant/components/elgato/switch.py +++ b/homeassistant/components/elgato/switch.py @@ -18,6 +18,8 @@ from . 
import ElgatorConfigEntry from .coordinator import ElgatoData, ElgatoDataUpdateCoordinator from .entity import ElgatoEntity +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class ElgatoSwitchEntityDescription(SwitchEntityDescription): From bb610acb8614de586000d659ccc7bb3012858b04 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 09:53:55 +0100 Subject: [PATCH 099/677] Migrate elgato light tests to use Kelvin (#133004) --- tests/components/elgato/test_light.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/components/elgato/test_light.py b/tests/components/elgato/test_light.py index 40c0232c2b3..43fad1faa77 100644 --- a/tests/components/elgato/test_light.py +++ b/tests/components/elgato/test_light.py @@ -9,7 +9,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components.elgato.const import DOMAIN, SERVICE_IDENTIFY from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, DOMAIN as LIGHT_DOMAIN, ) @@ -74,7 +74,7 @@ async def test_light_change_state_temperature( { ATTR_ENTITY_ID: "light.frenck", ATTR_BRIGHTNESS: 255, - ATTR_COLOR_TEMP: 100, + ATTR_COLOR_TEMP_KELVIN: 10000, }, blocking=True, ) From 0377dc5b5a7c46b18aa817fa6c4ad336f86d6953 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 10:18:11 +0100 Subject: [PATCH 100/677] Move coordinator for TwenteMilieu into own module (#133000) --- .../components/twentemilieu/__init__.py | 32 +----------- .../components/twentemilieu/calendar.py | 2 +- .../components/twentemilieu/coordinator.py | 49 +++++++++++++++++++ .../components/twentemilieu/entity.py | 2 +- .../twentemilieu/quality_scale.yaml | 5 +- .../components/twentemilieu/sensor.py | 2 +- tests/components/twentemilieu/conftest.py | 3 +- tests/components/twentemilieu/test_init.py | 2 +- 8 files changed, 58 insertions(+), 39 deletions(-) create mode 100644 
homeassistant/components/twentemilieu/coordinator.py diff --git a/homeassistant/components/twentemilieu/__init__.py b/homeassistant/components/twentemilieu/__init__.py index 2796e9916f1..1359e707601 100644 --- a/homeassistant/components/twentemilieu/__init__.py +++ b/homeassistant/components/twentemilieu/__init__.py @@ -2,53 +2,25 @@ from __future__ import annotations -from datetime import date, timedelta - -from twentemilieu import TwenteMilieu, WasteType import voluptuous as vol -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.aiohttp_client import async_get_clientsession -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from .const import CONF_HOUSE_LETTER, CONF_HOUSE_NUMBER, CONF_POST_CODE, DOMAIN, LOGGER - -SCAN_INTERVAL = timedelta(seconds=3600) +from .coordinator import TwenteMilieuConfigEntry, TwenteMilieuDataUpdateCoordinator SERVICE_UPDATE = "update" SERVICE_SCHEMA = vol.Schema({vol.Optional(CONF_ID): cv.string}) PLATFORMS = [Platform.CALENDAR, Platform.SENSOR] -type TwenteMilieuDataUpdateCoordinator = DataUpdateCoordinator[ - dict[WasteType, list[date]] -] -type TwenteMilieuConfigEntry = ConfigEntry[TwenteMilieuDataUpdateCoordinator] - async def async_setup_entry( hass: HomeAssistant, entry: TwenteMilieuConfigEntry ) -> bool: """Set up Twente Milieu from a config entry.""" - session = async_get_clientsession(hass) - twentemilieu = TwenteMilieu( - post_code=entry.data[CONF_POST_CODE], - house_number=entry.data[CONF_HOUSE_NUMBER], - house_letter=entry.data[CONF_HOUSE_LETTER], - session=session, - ) - - coordinator: TwenteMilieuDataUpdateCoordinator = DataUpdateCoordinator( - hass, - LOGGER, - config_entry=entry, - name=DOMAIN, - update_interval=SCAN_INTERVAL, - update_method=twentemilieu.update, - ) + coordinator = 
TwenteMilieuDataUpdateCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator diff --git a/homeassistant/components/twentemilieu/calendar.py b/homeassistant/components/twentemilieu/calendar.py index 8e7452823b7..d163ae4e564 100644 --- a/homeassistant/components/twentemilieu/calendar.py +++ b/homeassistant/components/twentemilieu/calendar.py @@ -10,8 +10,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from . import TwenteMilieuConfigEntry from .const import WASTE_TYPE_TO_DESCRIPTION +from .coordinator import TwenteMilieuConfigEntry from .entity import TwenteMilieuEntity diff --git a/homeassistant/components/twentemilieu/coordinator.py b/homeassistant/components/twentemilieu/coordinator.py new file mode 100644 index 00000000000..d2cf5a887ef --- /dev/null +++ b/homeassistant/components/twentemilieu/coordinator.py @@ -0,0 +1,49 @@ +"""Data update coordinator for Twente Milieu.""" + +from __future__ import annotations + +from datetime import date + +from twentemilieu import TwenteMilieu, WasteType + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import ( + CONF_HOUSE_LETTER, + CONF_HOUSE_NUMBER, + CONF_POST_CODE, + DOMAIN, + LOGGER, + SCAN_INTERVAL, +) + +type TwenteMilieuConfigEntry = ConfigEntry[TwenteMilieuDataUpdateCoordinator] + + +class TwenteMilieuDataUpdateCoordinator( + DataUpdateCoordinator[dict[WasteType, list[date]]] +): + """Class to manage fetching Twente Milieu data.""" + + def __init__(self, hass: HomeAssistant, entry: TwenteMilieuConfigEntry) -> None: + """Initialize Twente Milieu data update coordinator.""" + self.twentemilieu = TwenteMilieu( + 
post_code=entry.data[CONF_POST_CODE], + house_number=entry.data[CONF_HOUSE_NUMBER], + house_letter=entry.data[CONF_HOUSE_LETTER], + session=async_get_clientsession(hass), + ) + super().__init__( + hass, + LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + config_entry=entry, + ) + + async def _async_update_data(self) -> dict[WasteType, list[date]]: + """Fetch Twente Milieu data.""" + return await self.twentemilieu.update() diff --git a/homeassistant/components/twentemilieu/entity.py b/homeassistant/components/twentemilieu/entity.py index 0a2473f4524..660dd16288c 100644 --- a/homeassistant/components/twentemilieu/entity.py +++ b/homeassistant/components/twentemilieu/entity.py @@ -7,8 +7,8 @@ from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity import Entity from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import TwenteMilieuConfigEntry, TwenteMilieuDataUpdateCoordinator from .const import DOMAIN +from .coordinator import TwenteMilieuConfigEntry, TwenteMilieuDataUpdateCoordinator class TwenteMilieuEntity(CoordinatorEntity[TwenteMilieuDataUpdateCoordinator], Entity): diff --git a/homeassistant/components/twentemilieu/quality_scale.yaml b/homeassistant/components/twentemilieu/quality_scale.yaml index f8fd813b03d..210416e56c5 100644 --- a/homeassistant/components/twentemilieu/quality_scale.yaml +++ b/homeassistant/components/twentemilieu/quality_scale.yaml @@ -6,10 +6,7 @@ rules: This integration does not provide additional actions. appropriate-polling: done brands: done - common-modules: - status: todo - comment: | - The coordinator isn't in the common module yet. 
+ common-modules: done config-flow-test-coverage: done config-flow: status: todo diff --git a/homeassistant/components/twentemilieu/sensor.py b/homeassistant/components/twentemilieu/sensor.py index f5f91ce7080..4605ede1f87 100644 --- a/homeassistant/components/twentemilieu/sensor.py +++ b/homeassistant/components/twentemilieu/sensor.py @@ -16,8 +16,8 @@ from homeassistant.const import CONF_ID from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import TwenteMilieuConfigEntry from .const import DOMAIN +from .coordinator import TwenteMilieuConfigEntry from .entity import TwenteMilieuEntity diff --git a/tests/components/twentemilieu/conftest.py b/tests/components/twentemilieu/conftest.py index 7ecf1657ce9..e3e3c97034c 100644 --- a/tests/components/twentemilieu/conftest.py +++ b/tests/components/twentemilieu/conftest.py @@ -51,7 +51,8 @@ def mock_twentemilieu() -> Generator[MagicMock]: """Return a mocked Twente Milieu client.""" with ( patch( - "homeassistant.components.twentemilieu.TwenteMilieu", autospec=True + "homeassistant.components.twentemilieu.coordinator.TwenteMilieu", + autospec=True, ) as twentemilieu_mock, patch( "homeassistant.components.twentemilieu.config_flow.TwenteMilieu", diff --git a/tests/components/twentemilieu/test_init.py b/tests/components/twentemilieu/test_init.py index 7e08b5f4938..5cc09e6875d 100644 --- a/tests/components/twentemilieu/test_init.py +++ b/tests/components/twentemilieu/test_init.py @@ -29,7 +29,7 @@ async def test_load_unload_config_entry( @patch( - "homeassistant.components.twentemilieu.TwenteMilieu.update", + "homeassistant.components.twentemilieu.coordinator.TwenteMilieu.update", side_effect=RuntimeError, ) async def test_config_entry_not_ready( From 4a7039f51d1521377410a9af45800d839f171072 Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Thu, 12 Dec 2024 10:25:21 +0100 Subject: [PATCH 101/677] Bump velbusaio to 2024.12.0 (#132989) --- 
homeassistant/components/velbus/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/velbus/manifest.json b/homeassistant/components/velbus/manifest.json index 84262ebd61c..5725a10b6f6 100644 --- a/homeassistant/components/velbus/manifest.json +++ b/homeassistant/components/velbus/manifest.json @@ -13,7 +13,7 @@ "velbus-packet", "velbus-protocol" ], - "requirements": ["velbus-aio==2024.11.1"], + "requirements": ["velbus-aio==2024.12.0"], "usb": [ { "vid": "10CF", diff --git a/requirements_all.txt b/requirements_all.txt index 10b8c650127..26acf53fa53 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2939,7 +2939,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.11.1 +velbus-aio==2024.12.0 # homeassistant.components.venstar venstarcolortouch==0.19 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 194e29e35e8..afe7252f9f8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2349,7 +2349,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.11.1 +velbus-aio==2024.12.0 # homeassistant.components.venstar venstarcolortouch==0.19 From d49b1b2d6b23a5e1730076b1bb8787cc8734ea3a Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Thu, 12 Dec 2024 10:28:41 +0100 Subject: [PATCH 102/677] Use ConfigEntry runtime_data in EnergyZero (#132979) --- .../components/energyzero/__init__.py | 15 ++++----- .../components/energyzero/diagnostics.py | 32 ++++++++----------- homeassistant/components/energyzero/sensor.py | 8 +++-- .../components/energyzero/services.py | 2 +- tests/components/energyzero/test_init.py | 2 -- 5 files changed, 27 insertions(+), 32 deletions(-) diff --git a/homeassistant/components/energyzero/__init__.py b/homeassistant/components/energyzero/__init__.py index 3e1bb830cce..f7591056383 100644 --- 
a/homeassistant/components/energyzero/__init__.py +++ b/homeassistant/components/energyzero/__init__.py @@ -13,9 +13,11 @@ from .const import DOMAIN from .coordinator import EnergyZeroDataUpdateCoordinator from .services import async_setup_services -PLATFORMS = [Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.SENSOR] CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) +type EnergyZeroConfigEntry = ConfigEntry[EnergyZeroDataUpdateCoordinator] + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up EnergyZero services.""" @@ -25,7 +27,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: EnergyZeroConfigEntry) -> bool: """Set up EnergyZero from a config entry.""" coordinator = EnergyZeroDataUpdateCoordinator(hass) @@ -35,15 +37,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await coordinator.energyzero.close() raise - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: EnergyZeroConfigEntry) -> bool: """Unload EnergyZero config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/energyzero/diagnostics.py b/homeassistant/components/energyzero/diagnostics.py index 35d20fee929..ee1286598e6 100644 --- a/homeassistant/components/energyzero/diagnostics.py +++ b/homeassistant/components/energyzero/diagnostics.py @@ -5,11 +5,9 @@ from __future__ import 
annotations from datetime import timedelta from typing import Any -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . import EnergyZeroDataUpdateCoordinator -from .const import DOMAIN +from . import EnergyZeroConfigEntry from .coordinator import EnergyZeroData @@ -32,30 +30,28 @@ def get_gas_price(data: EnergyZeroData, hours: int) -> float | None: async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: EnergyZeroConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: EnergyZeroDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - return { "entry": { "title": entry.title, }, "energy": { - "current_hour_price": coordinator.data.energy_today.current_price, - "next_hour_price": coordinator.data.energy_today.price_at_time( - coordinator.data.energy_today.utcnow() + timedelta(hours=1) + "current_hour_price": entry.runtime_data.data.energy_today.current_price, + "next_hour_price": entry.runtime_data.data.energy_today.price_at_time( + entry.runtime_data.data.energy_today.utcnow() + timedelta(hours=1) ), - "average_price": coordinator.data.energy_today.average_price, - "max_price": coordinator.data.energy_today.extreme_prices[1], - "min_price": coordinator.data.energy_today.extreme_prices[0], - "highest_price_time": coordinator.data.energy_today.highest_price_time, - "lowest_price_time": coordinator.data.energy_today.lowest_price_time, - "percentage_of_max": coordinator.data.energy_today.pct_of_max_price, - "hours_priced_equal_or_lower": coordinator.data.energy_today.hours_priced_equal_or_lower, + "average_price": entry.runtime_data.data.energy_today.average_price, + "max_price": entry.runtime_data.data.energy_today.extreme_prices[1], + "min_price": entry.runtime_data.data.energy_today.extreme_prices[0], + "highest_price_time": entry.runtime_data.data.energy_today.highest_price_time, + "lowest_price_time": 
entry.runtime_data.data.energy_today.lowest_price_time, + "percentage_of_max": entry.runtime_data.data.energy_today.pct_of_max_price, + "hours_priced_equal_or_lower": entry.runtime_data.data.energy_today.hours_priced_equal_or_lower, }, "gas": { - "current_hour_price": get_gas_price(coordinator.data, 0), - "next_hour_price": get_gas_price(coordinator.data, 1), + "current_hour_price": get_gas_price(entry.runtime_data.data, 0), + "next_hour_price": get_gas_price(entry.runtime_data.data, 1), }, } diff --git a/homeassistant/components/energyzero/sensor.py b/homeassistant/components/energyzero/sensor.py index f65f7bd559c..d52da599966 100644 --- a/homeassistant/components/energyzero/sensor.py +++ b/homeassistant/components/energyzero/sensor.py @@ -13,7 +13,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CURRENCY_EURO, PERCENTAGE, @@ -26,6 +25,7 @@ from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity +from . 
import EnergyZeroConfigEntry from .const import DOMAIN, SERVICE_TYPE_DEVICE_NAMES from .coordinator import EnergyZeroData, EnergyZeroDataUpdateCoordinator @@ -142,10 +142,12 @@ def get_gas_price(data: EnergyZeroData, hours: int) -> float | None: async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: EnergyZeroConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up EnergyZero Sensors based on a config entry.""" - coordinator: EnergyZeroDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( EnergyZeroSensorEntity( coordinator=coordinator, diff --git a/homeassistant/components/energyzero/services.py b/homeassistant/components/energyzero/services.py index d98699c5c08..b281274575e 100644 --- a/homeassistant/components/energyzero/services.py +++ b/homeassistant/components/energyzero/services.py @@ -107,7 +107,7 @@ def __get_coordinator( }, ) - coordinator: EnergyZeroDataUpdateCoordinator = hass.data[DOMAIN][entry_id] + coordinator: EnergyZeroDataUpdateCoordinator = entry.runtime_data return coordinator diff --git a/tests/components/energyzero/test_init.py b/tests/components/energyzero/test_init.py index 287157026f4..f8e7e75e902 100644 --- a/tests/components/energyzero/test_init.py +++ b/tests/components/energyzero/test_init.py @@ -5,7 +5,6 @@ from unittest.mock import MagicMock, patch from energyzero import EnergyZeroConnectionError import pytest -from homeassistant.components.energyzero.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -26,7 +25,6 @@ async def test_load_unload_config_entry( await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() - assert not hass.data.get(DOMAIN) assert mock_config_entry.state is ConfigEntryState.NOT_LOADED From a30c942fa7246d7781a74ef6ad1239274bf215af 
Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 12 Dec 2024 10:42:27 +0100 Subject: [PATCH 103/677] Don't use kitchen_sink integration in config entries tests (#133012) --- .../components/config/test_config_entries.py | 22 ++++++------------- 1 file changed, 7 insertions(+), 15 deletions(-) diff --git a/tests/components/config/test_config_entries.py b/tests/components/config/test_config_entries.py index b96aa9ae006..4a3bff47d89 100644 --- a/tests/components/config/test_config_entries.py +++ b/tests/components/config/test_config_entries.py @@ -255,9 +255,7 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: async def test_remove_entry(hass: HomeAssistant, client: TestClient) -> None: """Test removing an entry via the API.""" - entry = MockConfigEntry( - domain="kitchen_sink", state=core_ce.ConfigEntryState.LOADED - ) + entry = MockConfigEntry(domain="test", state=core_ce.ConfigEntryState.LOADED) entry.add_to_hass(hass) resp = await client.delete(f"/api/config/config_entries/entry/{entry.entry_id}") assert resp.status == HTTPStatus.OK @@ -268,11 +266,9 @@ async def test_remove_entry(hass: HomeAssistant, client: TestClient) -> None: async def test_reload_entry(hass: HomeAssistant, client: TestClient) -> None: """Test reloading an entry via the API.""" - entry = MockConfigEntry( - domain="kitchen_sink", state=core_ce.ConfigEntryState.LOADED - ) + entry = MockConfigEntry(domain="test", state=core_ce.ConfigEntryState.LOADED) entry.add_to_hass(hass) - hass.config.components.add("kitchen_sink") + hass.config.components.add("test") resp = await client.post( f"/api/config/config_entries/entry/{entry.entry_id}/reload" ) @@ -1157,11 +1153,9 @@ async def test_update_prefrences( assert await async_setup_component(hass, "config", {}) ws_client = await hass_ws_client(hass) - entry = MockConfigEntry( - domain="kitchen_sink", state=core_ce.ConfigEntryState.LOADED - ) + entry = MockConfigEntry(domain="test", state=core_ce.ConfigEntryState.LOADED) 
entry.add_to_hass(hass) - hass.config.components.add("kitchen_sink") + hass.config.components.add("test") assert entry.pref_disable_new_entities is False assert entry.pref_disable_polling is False @@ -1257,12 +1251,10 @@ async def test_disable_entry( assert await async_setup_component(hass, "config", {}) ws_client = await hass_ws_client(hass) - entry = MockConfigEntry( - domain="kitchen_sink", state=core_ce.ConfigEntryState.LOADED - ) + entry = MockConfigEntry(domain="test", state=core_ce.ConfigEntryState.LOADED) entry.add_to_hass(hass) assert entry.disabled_by is None - hass.config.components.add("kitchen_sink") + hass.config.components.add("test") # Disable await ws_client.send_json( From 7dc31dec3b05a28af46b36f830bacec426bdaebf Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 10:52:03 +0100 Subject: [PATCH 104/677] Fix config entry import in Twente Milieu diagnostic (#133017) --- homeassistant/components/twentemilieu/diagnostics.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/twentemilieu/diagnostics.py b/homeassistant/components/twentemilieu/diagnostics.py index 75775303eb6..cb3b411c530 100644 --- a/homeassistant/components/twentemilieu/diagnostics.py +++ b/homeassistant/components/twentemilieu/diagnostics.py @@ -6,7 +6,7 @@ from typing import Any from homeassistant.core import HomeAssistant -from . 
import TwenteMilieuConfigEntry +from .coordinator import TwenteMilieuConfigEntry async def async_get_config_entry_diagnostics( From 0e45ccb9566fd92529f5b27b38dc2ab869c57085 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 11:13:24 +0100 Subject: [PATCH 105/677] Migrate google_assistant color_temp handlers to use Kelvin (#132997) --- .../components/google_assistant/trait.py | 29 +++++++------------ .../google_assistant/test_google_assistant.py | 2 +- .../google_assistant/test_smart_home.py | 2 +- .../components/google_assistant/test_trait.py | 16 +++++----- 4 files changed, 21 insertions(+), 28 deletions(-) diff --git a/homeassistant/components/google_assistant/trait.py b/homeassistant/components/google_assistant/trait.py index 8025a291031..44251a3be04 100644 --- a/homeassistant/components/google_assistant/trait.py +++ b/homeassistant/components/google_assistant/trait.py @@ -553,15 +553,9 @@ class ColorSettingTrait(_Trait): response["colorModel"] = "hsv" if light.color_temp_supported(color_modes): - # Max Kelvin is Min Mireds K = 1000000 / mireds - # Min Kelvin is Max Mireds K = 1000000 / mireds response["colorTemperatureRange"] = { - "temperatureMaxK": color_util.color_temperature_mired_to_kelvin( - attrs.get(light.ATTR_MIN_MIREDS) - ), - "temperatureMinK": color_util.color_temperature_mired_to_kelvin( - attrs.get(light.ATTR_MAX_MIREDS) - ), + "temperatureMaxK": int(attrs.get(light.ATTR_MAX_COLOR_TEMP_KELVIN)), + "temperatureMinK": int(attrs.get(light.ATTR_MIN_COLOR_TEMP_KELVIN)), } return response @@ -583,7 +577,7 @@ class ColorSettingTrait(_Trait): } if light.color_temp_supported([color_mode]): - temp = self.state.attributes.get(light.ATTR_COLOR_TEMP) + temp = self.state.attributes.get(light.ATTR_COLOR_TEMP_KELVIN) # Some faulty integrations might put 0 in here, raising exception. 
if temp == 0: _LOGGER.warning( @@ -592,9 +586,7 @@ class ColorSettingTrait(_Trait): temp, ) elif temp is not None: - color["temperatureK"] = color_util.color_temperature_mired_to_kelvin( - temp - ) + color["temperatureK"] = temp response = {} @@ -606,11 +598,9 @@ class ColorSettingTrait(_Trait): async def execute(self, command, data, params, challenge): """Execute a color temperature command.""" if "temperature" in params["color"]: - temp = color_util.color_temperature_kelvin_to_mired( - params["color"]["temperature"] - ) - min_temp = self.state.attributes[light.ATTR_MIN_MIREDS] - max_temp = self.state.attributes[light.ATTR_MAX_MIREDS] + temp = params["color"]["temperature"] + max_temp = self.state.attributes[light.ATTR_MAX_COLOR_TEMP_KELVIN] + min_temp = self.state.attributes[light.ATTR_MIN_COLOR_TEMP_KELVIN] if temp < min_temp or temp > max_temp: raise SmartHomeError( @@ -621,7 +611,10 @@ class ColorSettingTrait(_Trait): await self.hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: self.state.entity_id, light.ATTR_COLOR_TEMP: temp}, + { + ATTR_ENTITY_ID: self.state.entity_id, + light.ATTR_COLOR_TEMP_KELVIN: temp, + }, blocking=not self.config.should_report_state, context=data.context, ) diff --git a/tests/components/google_assistant/test_google_assistant.py b/tests/components/google_assistant/test_google_assistant.py index ea30f89e0ef..2b0bfd82908 100644 --- a/tests/components/google_assistant/test_google_assistant.py +++ b/tests/components/google_assistant/test_google_assistant.py @@ -491,7 +491,7 @@ async def test_execute_request(hass_fixture, assistant_client, auth_header) -> N assert kitchen.attributes.get(light.ATTR_RGB_COLOR) == (255, 0, 0) bed = hass_fixture.states.get("light.bed_light") - assert bed.attributes.get(light.ATTR_COLOR_TEMP) == 212 + assert bed.attributes.get(light.ATTR_COLOR_TEMP_KELVIN) == 4700 assert hass_fixture.states.get("switch.decorative_lights").state == "off" diff --git 
a/tests/components/google_assistant/test_smart_home.py b/tests/components/google_assistant/test_smart_home.py index c5e17155067..a1c2ba1b3d4 100644 --- a/tests/components/google_assistant/test_smart_home.py +++ b/tests/components/google_assistant/test_smart_home.py @@ -1450,7 +1450,7 @@ async def test_sync_message_recovery( "light.bad_light", "on", { - "min_mireds": "badvalue", + "max_color_temp_kelvin": "badvalue", "supported_color_modes": ["color_temp"], }, ) diff --git a/tests/components/google_assistant/test_trait.py b/tests/components/google_assistant/test_trait.py index 9e9c7015674..d269b5ff0d7 100644 --- a/tests/components/google_assistant/test_trait.py +++ b/tests/components/google_assistant/test_trait.py @@ -77,7 +77,7 @@ from homeassistant.const import ( ) from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.core_config import async_process_ha_core_config -from homeassistant.util import color, dt as dt_util +from homeassistant.util import dt as dt_util from homeassistant.util.unit_conversion import TemperatureConverter from . 
import BASIC_CONFIG, MockConfig @@ -870,10 +870,10 @@ async def test_color_setting_temperature_light(hass: HomeAssistant) -> None: "light.bla", STATE_ON, { - light.ATTR_MIN_MIREDS: 200, + light.ATTR_MAX_COLOR_TEMP_KELVIN: 5000, light.ATTR_COLOR_MODE: "color_temp", - light.ATTR_COLOR_TEMP: 300, - light.ATTR_MAX_MIREDS: 500, + light.ATTR_COLOR_TEMP_KELVIN: 3333, + light.ATTR_MIN_COLOR_TEMP_KELVIN: 2000, "supported_color_modes": ["color_temp"], }, ), @@ -906,7 +906,7 @@ async def test_color_setting_temperature_light(hass: HomeAssistant) -> None: assert len(calls) == 1 assert calls[0].data == { ATTR_ENTITY_ID: "light.bla", - light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857), + light.ATTR_COLOR_TEMP_KELVIN: 2857, } @@ -924,9 +924,9 @@ async def test_color_light_temperature_light_bad_temp(hass: HomeAssistant) -> No "light.bla", STATE_ON, { - light.ATTR_MIN_MIREDS: 200, - light.ATTR_COLOR_TEMP: 0, - light.ATTR_MAX_MIREDS: 500, + light.ATTR_MAX_COLOR_TEMP_KELVIN: 5000, + light.ATTR_COLOR_TEMP_KELVIN: 0, + light.ATTR_MIN_COLOR_TEMP_KELVIN: 2000, }, ), BASIC_CONFIG, From a9d71e0a5fb15b2f6750dbf9ec32e1e118eced8b Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Thu, 12 Dec 2024 11:34:36 +0100 Subject: [PATCH 106/677] Add reconfigure flow for Powerfox integration (#132260) --- .../components/powerfox/config_flow.py | 33 ++++++ .../components/powerfox/quality_scale.yaml | 2 +- .../components/powerfox/strings.json | 15 ++- tests/components/powerfox/test_config_flow.py | 105 ++++++++++++++++++ 4 files changed, 153 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/powerfox/config_flow.py b/homeassistant/components/powerfox/config_flow.py index ca78b8eb874..dd17badf881 100644 --- a/homeassistant/components/powerfox/config_flow.py +++ b/homeassistant/components/powerfox/config_flow.py @@ -100,3 +100,36 @@ class PowerfoxConfigFlow(ConfigFlow, domain=DOMAIN): data_schema=STEP_REAUTH_SCHEMA, errors=errors, ) + + async def async_step_reconfigure( + 
self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Reconfigure Powerfox configuration.""" + errors = {} + + reconfigure_entry = self._get_reconfigure_entry() + if user_input is not None: + client = Powerfox( + username=user_input[CONF_EMAIL], + password=user_input[CONF_PASSWORD], + session=async_get_clientsession(self.hass), + ) + try: + await client.all_devices() + except PowerfoxAuthenticationError: + errors["base"] = "invalid_auth" + except PowerfoxConnectionError: + errors["base"] = "cannot_connect" + else: + if reconfigure_entry.data[CONF_EMAIL] != user_input[CONF_EMAIL]: + self._async_abort_entries_match( + {CONF_EMAIL: user_input[CONF_EMAIL]} + ) + return self.async_update_reload_and_abort( + reconfigure_entry, data_updates=user_input + ) + return self.async_show_form( + step_id="reconfigure", + data_schema=STEP_USER_DATA_SCHEMA, + errors=errors, + ) diff --git a/homeassistant/components/powerfox/quality_scale.yaml b/homeassistant/components/powerfox/quality_scale.yaml index 7e104b894ca..f72d25c3684 100644 --- a/homeassistant/components/powerfox/quality_scale.yaml +++ b/homeassistant/components/powerfox/quality_scale.yaml @@ -80,7 +80,7 @@ rules: status: exempt comment: | There is no need for icon translations. - reconfiguration-flow: todo + reconfiguration-flow: done repair-issues: status: exempt comment: | diff --git a/homeassistant/components/powerfox/strings.json b/homeassistant/components/powerfox/strings.json index 3eab77494d3..4a7c8e8fa4d 100644 --- a/homeassistant/components/powerfox/strings.json +++ b/homeassistant/components/powerfox/strings.json @@ -21,6 +21,18 @@ "data_description": { "password": "[%key:component::powerfox::config::step::user::data_description::password%]" } + }, + "reconfigure": { + "title": "Reconfigure your Powerfox account", + "description": "Powerfox is already configured. 
Would you like to reconfigure it?", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "email": "[%key:component::powerfox::config::step::user::data_description::email%]", + "password": "[%key:component::powerfox::config::step::user::data_description::password%]" + } } }, "error": { @@ -29,7 +41,8 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "entity": { diff --git a/tests/components/powerfox/test_config_flow.py b/tests/components/powerfox/test_config_flow.py index 759092aee6e..a38f316faf3 100644 --- a/tests/components/powerfox/test_config_flow.py +++ b/tests/components/powerfox/test_config_flow.py @@ -110,6 +110,32 @@ async def test_duplicate_entry( assert result.get("reason") == "already_configured" +async def test_duplicate_entry_reconfiguration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_powerfox_client: AsyncMock, +) -> None: + """Test abort when setting up duplicate entry on reconfiguration.""" + # Add two config entries + mock_config_entry.add_to_hass(hass) + mock_config_entry_2 = MockConfigEntry( + domain=DOMAIN, + data={CONF_EMAIL: "new@powerfox.test", CONF_PASSWORD: "new-password"}, + ) + mock_config_entry_2.add_to_hass(hass) + assert len(hass.config_entries.async_entries()) == 2 + + # Reconfigure the second entry + result = await mock_config_entry_2.start_reconfigure_flow(hass) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@powerfox.test", CONF_PASSWORD: "test-password"}, + ) + + assert result.get("type") is FlowResultType.ABORT + assert 
result.get("reason") == "already_configured" + + @pytest.mark.parametrize( ("exception", "error"), [ @@ -216,3 +242,82 @@ async def test_step_reauth_exceptions( assert len(hass.config_entries.async_entries()) == 1 assert mock_config_entry.data[CONF_PASSWORD] == "new-password" + + +async def test_reconfigure( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfiguration of existing entry.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "reconfigure" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_EMAIL: "new-email@powerfox.test", + CONF_PASSWORD: "new-password", + }, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "reconfigure_successful" + + assert len(hass.config_entries.async_entries()) == 1 + assert mock_config_entry.data[CONF_EMAIL] == "new-email@powerfox.test" + assert mock_config_entry.data[CONF_PASSWORD] == "new-password" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (PowerfoxConnectionError, "cannot_connect"), + (PowerfoxAuthenticationError, "invalid_auth"), + ], +) +async def test_reconfigure_exceptions( + hass: HomeAssistant, + mock_powerfox_client: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + error: str, +) -> None: + """Test exceptions during reconfiguration flow.""" + mock_powerfox_client.all_devices.side_effect = exception + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "reconfigure" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_EMAIL: "new-email@powerfox.test", + CONF_PASSWORD: "new-password", + }, + 
) + assert result.get("type") is FlowResultType.FORM + assert result.get("errors") == {"base": error} + + # Recover from error + mock_powerfox_client.all_devices.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_EMAIL: "new-email@powerfox.test", + CONF_PASSWORD: "new-password", + }, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "reconfigure_successful" + + assert len(hass.config_entries.async_entries()) == 1 + assert mock_config_entry.data[CONF_EMAIL] == "new-email@powerfox.test" + assert mock_config_entry.data[CONF_PASSWORD] == "new-password" From 000667248987600bd552e14e85c48c610e3d1d1d Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Thu, 12 Dec 2024 11:39:55 +0100 Subject: [PATCH 107/677] Improve diagnostics code of EnergyZero integration (#133019) --- .../components/energyzero/diagnostics.py | 27 ++++++++++--------- .../components/energyzero/services.py | 12 +++++---- 2 files changed, 22 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/energyzero/diagnostics.py b/homeassistant/components/energyzero/diagnostics.py index ee1286598e6..e6116eac259 100644 --- a/homeassistant/components/energyzero/diagnostics.py +++ b/homeassistant/components/energyzero/diagnostics.py @@ -33,25 +33,28 @@ async def async_get_config_entry_diagnostics( hass: HomeAssistant, entry: EnergyZeroConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" + coordinator_data = entry.runtime_data.data + energy_today = coordinator_data.energy_today + return { "entry": { "title": entry.title, }, "energy": { - "current_hour_price": entry.runtime_data.data.energy_today.current_price, - "next_hour_price": entry.runtime_data.data.energy_today.price_at_time( - entry.runtime_data.data.energy_today.utcnow() + timedelta(hours=1) + "current_hour_price": energy_today.current_price, + "next_hour_price": energy_today.price_at_time( + 
energy_today.utcnow() + timedelta(hours=1) ), - "average_price": entry.runtime_data.data.energy_today.average_price, - "max_price": entry.runtime_data.data.energy_today.extreme_prices[1], - "min_price": entry.runtime_data.data.energy_today.extreme_prices[0], - "highest_price_time": entry.runtime_data.data.energy_today.highest_price_time, - "lowest_price_time": entry.runtime_data.data.energy_today.lowest_price_time, - "percentage_of_max": entry.runtime_data.data.energy_today.pct_of_max_price, - "hours_priced_equal_or_lower": entry.runtime_data.data.energy_today.hours_priced_equal_or_lower, + "average_price": energy_today.average_price, + "max_price": energy_today.extreme_prices[1], + "min_price": energy_today.extreme_prices[0], + "highest_price_time": energy_today.highest_price_time, + "lowest_price_time": energy_today.lowest_price_time, + "percentage_of_max": energy_today.pct_of_max_price, + "hours_priced_equal_or_lower": energy_today.hours_priced_equal_or_lower, }, "gas": { - "current_hour_price": get_gas_price(entry.runtime_data.data, 0), - "next_hour_price": get_gas_price(entry.runtime_data.data, 1), + "current_hour_price": get_gas_price(coordinator_data, 0), + "next_hour_price": get_gas_price(coordinator_data, 1), }, } diff --git a/homeassistant/components/energyzero/services.py b/homeassistant/components/energyzero/services.py index b281274575e..ba2bbf0573f 100644 --- a/homeassistant/components/energyzero/services.py +++ b/homeassistant/components/energyzero/services.py @@ -5,12 +5,12 @@ from __future__ import annotations from datetime import date, datetime from enum import Enum from functools import partial -from typing import Final +from typing import TYPE_CHECKING, Final from energyzero import Electricity, Gas, VatOption import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import ( HomeAssistant, ServiceCall, @@ -22,6 +22,9 @@ from 
homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import selector from homeassistant.util import dt as dt_util +if TYPE_CHECKING: + from . import EnergyZeroConfigEntry + from .const import DOMAIN from .coordinator import EnergyZeroDataUpdateCoordinator @@ -88,7 +91,7 @@ def __get_coordinator( ) -> EnergyZeroDataUpdateCoordinator: """Get the coordinator from the entry.""" entry_id: str = call.data[ATTR_CONFIG_ENTRY] - entry: ConfigEntry | None = hass.config_entries.async_get_entry(entry_id) + entry: EnergyZeroConfigEntry | None = hass.config_entries.async_get_entry(entry_id) if not entry: raise ServiceValidationError( @@ -107,8 +110,7 @@ def __get_coordinator( }, ) - coordinator: EnergyZeroDataUpdateCoordinator = entry.runtime_data - return coordinator + return entry.runtime_data async def __get_prices( From ded7cee6e57b73e9cda05ba97db322686b363628 Mon Sep 17 00:00:00 2001 From: Simone Chemelli Date: Thu, 12 Dec 2024 05:42:00 -0500 Subject: [PATCH 108/677] fix AndroidTV logging when disconnected (#132919) --- .../components/androidtv/__init__.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/androidtv/__init__.py b/homeassistant/components/androidtv/__init__.py index 44e4c54b560..4ffa0e24777 100644 --- a/homeassistant/components/androidtv/__init__.py +++ b/homeassistant/components/androidtv/__init__.py @@ -135,15 +135,16 @@ async def async_connect_androidtv( ) aftv = await async_androidtv_setup( - config[CONF_HOST], - config[CONF_PORT], - adbkey, - config.get(CONF_ADB_SERVER_IP), - config.get(CONF_ADB_SERVER_PORT, DEFAULT_ADB_SERVER_PORT), - state_detection_rules, - config[CONF_DEVICE_CLASS], - timeout, - signer, + host=config[CONF_HOST], + port=config[CONF_PORT], + adbkey=adbkey, + adb_server_ip=config.get(CONF_ADB_SERVER_IP), + adb_server_port=config.get(CONF_ADB_SERVER_PORT, DEFAULT_ADB_SERVER_PORT), + state_detection_rules=state_detection_rules, + 
device_class=config[CONF_DEVICE_CLASS], + auth_timeout_s=timeout, + signer=signer, + log_errors=False, ) if not aftv.available: From 52491bb75eafa9fc3edf068e1907851fb6fff87e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 12:52:01 +0100 Subject: [PATCH 109/677] Migrate tplink light tests to use Kelvin (#133026) --- tests/components/tplink/test_light.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/components/tplink/test_light.py b/tests/components/tplink/test_light.py index 6998d8fbcc7..b7f4ed6b8f4 100644 --- a/tests/components/tplink/test_light.py +++ b/tests/components/tplink/test_light.py @@ -26,8 +26,8 @@ from homeassistant.components.light import ( ATTR_EFFECT, ATTR_EFFECT_LIST, ATTR_HS_COLOR, - ATTR_MAX_MIREDS, - ATTR_MIN_MIREDS, + ATTR_MAX_COLOR_TEMP_KELVIN, + ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_SUPPORTED_COLOR_MODES, ATTR_TRANSITION, @@ -153,8 +153,8 @@ async def test_color_light( assert attributes[ATTR_COLOR_MODE] == "brightness" else: assert attributes[ATTR_COLOR_MODE] == "hs" - assert attributes[ATTR_MIN_MIREDS] == 111 - assert attributes[ATTR_MAX_MIREDS] == 250 + assert attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 4000 + assert attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 9000 assert attributes[ATTR_HS_COLOR] == (10, 30) assert attributes[ATTR_RGB_COLOR] == (255, 191, 178) assert attributes[ATTR_XY_COLOR] == (0.42, 0.336) @@ -307,8 +307,8 @@ async def test_color_temp_light( assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "hs"] else: assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp"] - assert attributes[ATTR_MIN_MIREDS] == 111 - assert attributes[ATTR_MAX_MIREDS] == 250 + assert attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 9000 + assert attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 4000 assert attributes[ATTR_COLOR_TEMP_KELVIN] == 4000 await hass.services.async_call( From f2aaf2ac4abe6722763cd57d905f158b5464b13e Mon Sep 17 00:00:00 
2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 12:55:25 +0100 Subject: [PATCH 110/677] Small test cleanups in Twente Milieu (#133028) --- .../snapshots/test_config_flow.ambr | 93 ------------------- .../twentemilieu/test_config_flow.py | 85 ++++++++++++----- 2 files changed, 63 insertions(+), 115 deletions(-) delete mode 100644 tests/components/twentemilieu/snapshots/test_config_flow.ambr diff --git a/tests/components/twentemilieu/snapshots/test_config_flow.ambr b/tests/components/twentemilieu/snapshots/test_config_flow.ambr deleted file mode 100644 index a98119e81c9..00000000000 --- a/tests/components/twentemilieu/snapshots/test_config_flow.ambr +++ /dev/null @@ -1,93 +0,0 @@ -# serializer version: 1 -# name: test_full_user_flow - FlowResultSnapshot({ - 'context': dict({ - 'source': 'user', - 'unique_id': '12345', - }), - 'data': dict({ - 'house_letter': 'A', - 'house_number': '1', - 'id': 12345, - 'post_code': '1234AB', - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'twentemilieu', - 'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'house_letter': 'A', - 'house_number': '1', - 'id': 12345, - 'post_code': '1234AB', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'twentemilieu', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': '12345', - 'unique_id': '12345', - 'version': 1, - }), - 'title': '12345', - 'type': , - 'version': 1, - }) -# --- -# name: test_invalid_address - FlowResultSnapshot({ - 'context': dict({ - 'source': 'user', - 'unique_id': '12345', - }), - 'data': dict({ - 'house_letter': None, - 'house_number': '1', - 'id': 12345, - 'post_code': '1234AB', - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'twentemilieu', - 'minor_version': 1, - 'options': dict({ - }), - 'result': 
ConfigEntrySnapshot({ - 'data': dict({ - 'house_letter': None, - 'house_number': '1', - 'id': 12345, - 'post_code': '1234AB', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'twentemilieu', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': '12345', - 'unique_id': '12345', - 'version': 1, - }), - 'title': '12345', - 'type': , - 'version': 1, - }) -# --- diff --git a/tests/components/twentemilieu/test_config_flow.py b/tests/components/twentemilieu/test_config_flow.py index dbc01c69acb..6dc261b8769 100644 --- a/tests/components/twentemilieu/test_config_flow.py +++ b/tests/components/twentemilieu/test_config_flow.py @@ -3,7 +3,6 @@ from unittest.mock import MagicMock import pytest -from syrupy.assertion import SnapshotAssertion from twentemilieu import TwenteMilieuAddressError, TwenteMilieuConnectionError from homeassistant import config_entries @@ -15,6 +14,7 @@ from homeassistant.components.twentemilieu.const import ( DOMAIN, ) from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_ID from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -24,16 +24,16 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.usefixtures("mock_twentemilieu") -async def test_full_user_flow(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: +async def test_full_user_flow(hass: HomeAssistant) -> None: """Test registering an integration and finishing flow works.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - result2 = await hass.config_entries.flow.async_configure( + result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_POST_CODE: "1234AB", @@ -42,14 +42,22 @@ async def test_full_user_flow(hass: HomeAssistant, snapshot: SnapshotAssertion) }, ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2 == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "12345" + assert config_entry.data == { + CONF_HOUSE_LETTER: "A", + CONF_HOUSE_NUMBER: "1", + CONF_ID: 12345, + CONF_POST_CODE: "1234AB", + } + assert not config_entry.options async def test_invalid_address( hass: HomeAssistant, mock_twentemilieu: MagicMock, - snapshot: SnapshotAssertion, ) -> None: """Test full user flow when the user enters an incorrect address. @@ -60,11 +68,11 @@ async def test_invalid_address( DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" mock_twentemilieu.unique_id.side_effect = TwenteMilieuAddressError - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_POST_CODE: "1234", @@ -72,12 +80,12 @@ async def test_invalid_address( }, ) - assert result2.get("type") is FlowResultType.FORM - assert result2.get("step_id") == "user" - assert result2.get("errors") == {"base": "invalid_address"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "invalid_address"} mock_twentemilieu.unique_id.side_effect = None - result3 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_POST_CODE: "1234AB", @@ -85,8 +93,17 @@ async def test_invalid_address( }, ) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - 
assert result3 == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "12345" + assert config_entry.data == { + CONF_HOUSE_LETTER: None, + CONF_HOUSE_NUMBER: "1", + CONF_ID: 12345, + CONF_POST_CODE: "1234AB", + } + assert not config_entry.options async def test_connection_error( @@ -106,9 +123,33 @@ async def test_connection_error( }, ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" - assert result.get("errors") == {"base": "cannot_connect"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "cannot_connect"} + + # Recover from error + mock_twentemilieu.unique_id.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_POST_CODE: "1234AB", + CONF_HOUSE_NUMBER: "1", + CONF_HOUSE_LETTER: "A", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "12345" + assert config_entry.data == { + CONF_HOUSE_LETTER: "A", + CONF_HOUSE_NUMBER: "1", + CONF_ID: 12345, + CONF_POST_CODE: "1234AB", + } + assert not config_entry.options @pytest.mark.usefixtures("mock_twentemilieu") @@ -128,5 +169,5 @@ async def test_address_already_set_up( }, ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" From 85d4572a17a5d6100e37455befa7dfe6afb619c8 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 12 Dec 2024 13:41:56 +0100 Subject: [PATCH 111/677] Adjust backup agent platform (#132944) * Adjust backup agent platform * Adjust according to discussion * Clean up the local agent dict too * Add test * Update kitchen_sink * Apply suggestions from code review Co-authored-by: Martin Hjelmare * Adjust 
tests * Clean up * Fix kitchen sink reload --------- Co-authored-by: Martin Hjelmare --- homeassistant/components/backup/agent.py | 23 +++- homeassistant/components/backup/backup.py | 3 +- homeassistant/components/backup/manager.py | 41 +++++-- homeassistant/components/cloud/backup.py | 7 +- homeassistant/components/hassio/backup.py | 2 + .../components/kitchen_sink/__init__.py | 21 +++- .../components/kitchen_sink/backup.py | 27 ++++- .../components/kitchen_sink/const.py | 12 ++ tests/components/backup/common.py | 2 + tests/components/backup/test_manager.py | 103 +++++++++++++++--- tests/components/cloud/test_backup.py | 5 +- tests/components/kitchen_sink/test_backup.py | 21 ++++ 12 files changed, 235 insertions(+), 32 deletions(-) create mode 100644 homeassistant/components/kitchen_sink/const.py diff --git a/homeassistant/components/backup/agent.py b/homeassistant/components/backup/agent.py index 36f2e7ee34e..44bc9b298e8 100644 --- a/homeassistant/components/backup/agent.py +++ b/homeassistant/components/backup/agent.py @@ -7,7 +7,9 @@ from collections.abc import AsyncIterator, Callable, Coroutine from pathlib import Path from typing import Any, Protocol -from homeassistant.core import HomeAssistant +from propcache import cached_property + +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from .models import AgentBackup @@ -26,8 +28,14 @@ class BackupAgentUnreachableError(BackupAgentError): class BackupAgent(abc.ABC): """Backup agent interface.""" + domain: str name: str + @cached_property + def agent_id(self) -> str: + """Return the agent_id.""" + return f"{self.domain}.{self.name}" + @abc.abstractmethod async def async_download_backup( self, @@ -98,3 +106,16 @@ class BackupAgentPlatformProtocol(Protocol): **kwargs: Any, ) -> list[BackupAgent]: """Return a list of backup agents.""" + + @callback + def async_register_backup_agents_listener( + self, + hass: HomeAssistant, + *, + listener: Callable[[], 
None], + **kwargs: Any, + ) -> Callable[[], None]: + """Register a listener to be called when agents are added or removed. + + :return: A function to unregister the listener. + """ diff --git a/homeassistant/components/backup/backup.py b/homeassistant/components/backup/backup.py index b9aad89c7f3..ef4924161c2 100644 --- a/homeassistant/components/backup/backup.py +++ b/homeassistant/components/backup/backup.py @@ -12,7 +12,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.hassio import is_hassio from .agent import BackupAgent, LocalBackupAgent -from .const import LOGGER +from .const import DOMAIN, LOGGER from .models import AgentBackup from .util import read_backup @@ -30,6 +30,7 @@ async def async_get_backup_agents( class CoreLocalBackupAgent(LocalBackupAgent): """Local backup agent for Core and Container installations.""" + domain = DOMAIN name = "local" def __init__(self, hass: HomeAssistant) -> None: diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index 1defbd350fb..66977e568e4 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -243,6 +243,7 @@ class BackupManager: """Initialize the backup manager.""" self.hass = hass self.platforms: dict[str, BackupPlatformProtocol] = {} + self.backup_agent_platforms: dict[str, BackupAgentPlatformProtocol] = {} self.backup_agents: dict[str, BackupAgent] = {} self.local_backup_agents: dict[str, LocalBackupAgent] = {} @@ -291,22 +292,48 @@ class BackupManager: self.platforms[integration_domain] = platform - async def _async_add_platform_agents( + @callback + def _async_add_backup_agent_platform( self, integration_domain: str, platform: BackupAgentPlatformProtocol, ) -> None: - """Add a platform to the backup manager.""" + """Add backup agent platform to the backup manager.""" if not hasattr(platform, "async_get_backup_agents"): return + self.backup_agent_platforms[integration_domain] = platform 
+ + @callback + def listener() -> None: + LOGGER.debug("Loading backup agents for %s", integration_domain) + self.hass.async_create_task( + self._async_reload_backup_agents(integration_domain) + ) + + if hasattr(platform, "async_register_backup_agents_listener"): + platform.async_register_backup_agents_listener(self.hass, listener=listener) + + listener() + + async def _async_reload_backup_agents(self, domain: str) -> None: + """Add backup agent platform to the backup manager.""" + platform = self.backup_agent_platforms[domain] + + # Remove all agents for the domain + for agent_id in list(self.backup_agents): + if self.backup_agents[agent_id].domain == domain: + del self.backup_agents[agent_id] + for agent_id in list(self.local_backup_agents): + if self.local_backup_agents[agent_id].domain == domain: + del self.local_backup_agents[agent_id] + + # Add new agents agents = await platform.async_get_backup_agents(self.hass) - self.backup_agents.update( - {f"{integration_domain}.{agent.name}": agent for agent in agents} - ) + self.backup_agents.update({agent.agent_id: agent for agent in agents}) self.local_backup_agents.update( { - f"{integration_domain}.{agent.name}": agent + agent.agent_id: agent for agent in agents if isinstance(agent, LocalBackupAgent) } @@ -320,7 +347,7 @@ class BackupManager: ) -> None: """Add a backup platform manager.""" self._add_platform_pre_post_handler(integration_domain, platform) - await self._async_add_platform_agents(integration_domain, platform) + self._async_add_backup_agent_platform(integration_domain, platform) LOGGER.debug("Backup platform %s loaded", integration_domain) LOGGER.debug("%s platforms loaded in total", len(self.platforms)) LOGGER.debug("%s agents loaded in total", len(self.backup_agents)) diff --git a/homeassistant/components/cloud/backup.py b/homeassistant/components/cloud/backup.py index 58ecc7a78fd..2c7cc9d7bd5 100644 --- a/homeassistant/components/cloud/backup.py +++ b/homeassistant/components/cloud/backup.py @@ 
-38,7 +38,11 @@ async def async_get_backup_agents( **kwargs: Any, ) -> list[BackupAgent]: """Return the cloud backup agent.""" - return [CloudBackupAgent(hass=hass, cloud=hass.data[DATA_CLOUD])] + cloud = hass.data[DATA_CLOUD] + if not cloud.is_logged_in: + return [] + + return [CloudBackupAgent(hass=hass, cloud=cloud)] class ChunkAsyncStreamIterator: @@ -69,6 +73,7 @@ class ChunkAsyncStreamIterator: class CloudBackupAgent(BackupAgent): """Cloud backup agent.""" + domain = DOMAIN name = DOMAIN def __init__(self, hass: HomeAssistant, cloud: Cloud[CloudClient]) -> None: diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py index f7f66f6cecc..53f3a226a09 100644 --- a/homeassistant/components/hassio/backup.py +++ b/homeassistant/components/hassio/backup.py @@ -79,6 +79,8 @@ def _backup_details_to_agent_backup( class SupervisorBackupAgent(BackupAgent): """Backup agent for supervised installations.""" + domain = DOMAIN + def __init__(self, hass: HomeAssistant, name: str, location: str | None) -> None: """Initialize the backup agent.""" super().__init__() diff --git a/homeassistant/components/kitchen_sink/__init__.py b/homeassistant/components/kitchen_sink/__init__.py index 2c3887bb383..88d0c868636 100644 --- a/homeassistant/components/kitchen_sink/__init__.py +++ b/homeassistant/components/kitchen_sink/__init__.py @@ -26,8 +26,7 @@ from homeassistant.helpers.issue_registry import IssueSeverity, async_create_iss from homeassistant.helpers.typing import ConfigType import homeassistant.util.dt as dt_util -DOMAIN = "kitchen_sink" - +from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN COMPONENTS_WITH_DEMO_PLATFORM = [ Platform.BUTTON, @@ -88,9 +87,27 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b # Start a reauth flow config_entry.async_start_reauth(hass) + # Notify backup listeners + hass.async_create_task(_notify_backup_listeners(hass), eager_start=False) + return True +async def 
async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload config entry.""" + # Notify backup listeners + hass.async_create_task(_notify_backup_listeners(hass), eager_start=False) + + return await hass.config_entries.async_unload_platforms( + entry, COMPONENTS_WITH_DEMO_PLATFORM + ) + + +async def _notify_backup_listeners(hass: HomeAssistant) -> None: + for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []): + listener() + + def _create_issues(hass: HomeAssistant) -> None: """Create some issue registry issues.""" async_create_issue( diff --git a/homeassistant/components/kitchen_sink/backup.py b/homeassistant/components/kitchen_sink/backup.py index 02c61ff4de6..615364f55ee 100644 --- a/homeassistant/components/kitchen_sink/backup.py +++ b/homeassistant/components/kitchen_sink/backup.py @@ -8,7 +8,9 @@ import logging from typing import Any from homeassistant.components.backup import AddonInfo, AgentBackup, BackupAgent, Folder -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback + +from . 
import DATA_BACKUP_AGENT_LISTENERS, DOMAIN LOGGER = logging.getLogger(__name__) @@ -17,12 +19,35 @@ async def async_get_backup_agents( hass: HomeAssistant, ) -> list[BackupAgent]: """Register the backup agents.""" + if not hass.config_entries.async_loaded_entries(DOMAIN): + LOGGER.info("No config entry found or entry is not loaded") + return [] return [KitchenSinkBackupAgent("syncer")] +@callback +def async_register_backup_agents_listener( + hass: HomeAssistant, + *, + listener: Callable[[], None], + **kwargs: Any, +) -> Callable[[], None]: + """Register a listener to be called when agents are added or removed.""" + hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, []).append(listener) + + @callback + def remove_listener() -> None: + """Remove the listener.""" + hass.data[DATA_BACKUP_AGENT_LISTENERS].remove(listener) + + return remove_listener + + class KitchenSinkBackupAgent(BackupAgent): """Kitchen sink backup agent.""" + domain = DOMAIN + def __init__(self, name: str) -> None: """Initialize the kitchen sink backup sync agent.""" super().__init__() diff --git a/homeassistant/components/kitchen_sink/const.py b/homeassistant/components/kitchen_sink/const.py new file mode 100644 index 00000000000..e6edaca46ce --- /dev/null +++ b/homeassistant/components/kitchen_sink/const.py @@ -0,0 +1,12 @@ +"""Constants for the Kitchen Sink integration.""" + +from __future__ import annotations + +from collections.abc import Callable + +from homeassistant.util.hass_dict import HassKey + +DOMAIN = "kitchen_sink" +DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey( + f"{DOMAIN}.backup_agent_listeners" +) diff --git a/tests/components/backup/common.py b/tests/components/backup/common.py index 133a2602192..b06b8a5ef5d 100644 --- a/tests/components/backup/common.py +++ b/tests/components/backup/common.py @@ -57,6 +57,8 @@ TEST_DOMAIN = "test" class BackupAgentTest(BackupAgent): """Test backup agent.""" + domain = "test" + def __init__(self, name: str, backups: 
list[AgentBackup] | None = None) -> None: """Initialize the backup agent.""" self.name = name diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index f335ea5c0ee..302f4e07011 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -6,6 +6,7 @@ import asyncio from collections.abc import Generator from io import StringIO import json +from pathlib import Path from typing import Any from unittest.mock import ANY, AsyncMock, MagicMock, Mock, call, mock_open, patch @@ -18,6 +19,7 @@ from homeassistant.components.backup import ( BackupManager, BackupPlatformProtocol, Folder, + LocalBackupAgent, backup as local_backup_platform, ) from homeassistant.components.backup.const import DATA_MANAGER @@ -235,14 +237,14 @@ async def test_async_initiate_backup( core_get_backup_agents.return_value = [local_agent] await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() - await _setup_backup_platform( - hass, - domain="test", - platform=Mock( - async_get_backup_agents=AsyncMock(return_value=[remote_agent]), - spec_set=BackupAgentPlatformProtocol, - ), - ) + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) ws_client = await hass_ws_client(hass) @@ -402,14 +404,14 @@ async def test_async_initiate_backup_with_agent_error( core_get_backup_agents.return_value = [local_agent] await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() - await _setup_backup_platform( - hass, - domain="test", - platform=Mock( - async_get_backup_agents=AsyncMock(return_value=[remote_agent]), - spec_set=BackupAgentPlatformProtocol, - ), - ) + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) ws_client = await 
hass_ws_client(hass) @@ -534,21 +536,86 @@ async def test_loading_platforms( assert not manager.platforms + get_agents_mock = AsyncMock(return_value=[]) + await _setup_backup_platform( hass, platform=Mock( async_pre_backup=AsyncMock(), async_post_backup=AsyncMock(), - async_get_backup_agents=AsyncMock(), + async_get_backup_agents=get_agents_mock, ), ) await manager.load_platforms() await hass.async_block_till_done() assert len(manager.platforms) == 1 - assert "Loaded 1 platforms" in caplog.text + get_agents_mock.assert_called_once_with(hass) + + +class LocalBackupAgentTest(BackupAgentTest, LocalBackupAgent): + """Local backup agent.""" + + def get_backup_path(self, backup_id: str) -> Path: + """Return the local path to a backup.""" + return "test.tar" + + +@pytest.mark.parametrize( + ("agent_class", "num_local_agents"), + [(LocalBackupAgentTest, 2), (BackupAgentTest, 1)], +) +async def test_loading_platform_with_listener( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + agent_class: type[BackupAgentTest], + num_local_agents: int, +) -> None: + """Test loading a backup agent platform which can be listened to.""" + ws_client = await hass_ws_client(hass) + assert await async_setup_component(hass, DOMAIN, {}) + manager = hass.data[DATA_MANAGER] + + get_agents_mock = AsyncMock(return_value=[agent_class("remote1", backups=[])]) + register_listener_mock = Mock() + + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=get_agents_mock, + async_register_backup_agents_listener=register_listener_mock, + ), + ) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id({"type": "backup/agents/info"}) + resp = await ws_client.receive_json() + assert resp["result"]["agents"] == [ + {"agent_id": "backup.local"}, + {"agent_id": "test.remote1"}, + ] + assert len(manager.local_backup_agents) == num_local_agents + + get_agents_mock.assert_called_once_with(hass) + 
register_listener_mock.assert_called_once_with(hass, listener=ANY) + + get_agents_mock.reset_mock() + get_agents_mock.return_value = [agent_class("remote2", backups=[])] + listener = register_listener_mock.call_args[1]["listener"] + listener() + + get_agents_mock.assert_called_once_with(hass) + await ws_client.send_json_auto_id({"type": "backup/agents/info"}) + resp = await ws_client.receive_json() + assert resp["result"]["agents"] == [ + {"agent_id": "backup.local"}, + {"agent_id": "test.remote2"}, + ] + assert len(manager.local_backup_agents) == num_local_agents + @pytest.mark.parametrize( "platform_mock", diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py index 16b446c7a2b..d5dc8751d82 100644 --- a/tests/components/cloud/test_backup.py +++ b/tests/components/cloud/test_backup.py @@ -26,7 +26,10 @@ from tests.typing import ClientSessionGenerator, MagicMock, WebSocketGenerator @pytest.fixture(autouse=True) async def setup_integration( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, cloud: MagicMock + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + cloud: MagicMock, + cloud_logged_in: None, ) -> AsyncGenerator[None]: """Set up cloud integration.""" with patch("homeassistant.components.backup.is_hassio", return_value=False): diff --git a/tests/components/kitchen_sink/test_backup.py b/tests/components/kitchen_sink/test_backup.py index 7db03b7fa46..6a738094ae6 100644 --- a/tests/components/kitchen_sink/test_backup.py +++ b/tests/components/kitchen_sink/test_backup.py @@ -57,6 +57,27 @@ async def test_agents_info( "agents": [{"agent_id": "backup.local"}, {"agent_id": "kitchen_sink.syncer"}], } + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + + assert response["success"] + assert 
response["result"] == {"agents": [{"agent_id": "backup.local"}]} + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + + assert response["success"] + assert response["result"] == { + "agents": [{"agent_id": "backup.local"}, {"agent_id": "kitchen_sink.syncer"}], + } + async def test_agents_list_backups( hass: HomeAssistant, From 5c80ddb89160e84be136e5d42b9edce3c050f277 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 13:49:17 +0100 Subject: [PATCH 112/677] Fix LaMetric config flow for cloud import path (#133039) --- homeassistant/components/lametric/config_flow.py | 5 ++++- homeassistant/components/lametric/strings.json | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/lametric/config_flow.py b/homeassistant/components/lametric/config_flow.py index 36dcdf26ed6..05c5dea77d1 100644 --- a/homeassistant/components/lametric/config_flow.py +++ b/homeassistant/components/lametric/config_flow.py @@ -249,7 +249,10 @@ class LaMetricFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): device = await lametric.device() if self.source != SOURCE_REAUTH: - await self.async_set_unique_id(device.serial_number) + await self.async_set_unique_id( + device.serial_number, + raise_on_progress=False, + ) self._abort_if_unique_id_configured( updates={CONF_HOST: lametric.host, CONF_API_KEY: lametric.api_key} ) diff --git a/homeassistant/components/lametric/strings.json b/homeassistant/components/lametric/strings.json index 87bda01e305..0fd6f5a12dc 100644 --- a/homeassistant/components/lametric/strings.json +++ b/homeassistant/components/lametric/strings.json @@ -21,8 +21,11 @@ "api_key": "You can find this API key in [devices page in your LaMetric developer account](https://developer.lametric.com/user/devices)." 
} }, - "user_cloud_select_device": { + "cloud_select_device": { "data": { + "device": "Device" + }, + "data_description": { "device": "Select the LaMetric device to add" } } From 7bdf034b93f9c5fbb97b46652ec509186869ffa5 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 13:54:22 +0100 Subject: [PATCH 113/677] Migrate template light tests to use Kelvin (#133025) --- tests/components/template/test_light.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/components/template/test_light.py b/tests/components/template/test_light.py index 065a1488dc9..b5ba93a4bd0 100644 --- a/tests/components/template/test_light.py +++ b/tests/components/template/test_light.py @@ -7,7 +7,7 @@ import pytest from homeassistant.components import light from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_RGB_COLOR, @@ -773,7 +773,7 @@ async def test_temperature_action_no_template( await hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_template_light", ATTR_COLOR_TEMP: 345}, + {ATTR_ENTITY_ID: "light.test_template_light", ATTR_COLOR_TEMP_KELVIN: 2898}, blocking=True, ) @@ -1395,7 +1395,7 @@ async def test_all_colors_mode_no_template( await hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_template_light", ATTR_COLOR_TEMP: 123}, + {ATTR_ENTITY_ID: "light.test_template_light", ATTR_COLOR_TEMP_KELVIN: 8130}, blocking=True, ) @@ -1531,7 +1531,7 @@ async def test_all_colors_mode_no_template( await hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_template_light", ATTR_COLOR_TEMP: 234}, + {ATTR_ENTITY_ID: "light.test_template_light", ATTR_COLOR_TEMP_KELVIN: 4273}, blocking=True, ) From 6005b6d01ca46e89a8350d3633f07aac9f620c15 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 13:55:57 +0100 Subject: 
[PATCH 114/677] Explicitly pass config entry to coordinator in Elgato (#133014) * Explicitly pass config entry to coordinator in Elgato * Make it noice! * Apply suggestions from code review Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> * Adjustment from review comment --------- Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/components/elgato/__init__.py | 9 +++------ homeassistant/components/elgato/button.py | 5 ++--- homeassistant/components/elgato/coordinator.py | 7 +++++-- homeassistant/components/elgato/diagnostics.py | 4 ++-- homeassistant/components/elgato/light.py | 5 ++--- homeassistant/components/elgato/sensor.py | 5 ++--- homeassistant/components/elgato/switch.py | 5 ++--- 7 files changed, 18 insertions(+), 22 deletions(-) diff --git a/homeassistant/components/elgato/__init__.py b/homeassistant/components/elgato/__init__.py index 2d8446c3b76..1b1ff9948c9 100644 --- a/homeassistant/components/elgato/__init__.py +++ b/homeassistant/components/elgato/__init__.py @@ -1,17 +1,14 @@ """Support for Elgato Lights.""" -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .coordinator import ElgatoDataUpdateCoordinator +from .coordinator import ElgatoConfigEntry, ElgatoDataUpdateCoordinator PLATFORMS = [Platform.BUTTON, Platform.LIGHT, Platform.SENSOR, Platform.SWITCH] -type ElgatorConfigEntry = ConfigEntry[ElgatoDataUpdateCoordinator] - -async def async_setup_entry(hass: HomeAssistant, entry: ElgatorConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: ElgatoConfigEntry) -> bool: """Set up Elgato Light from a config entry.""" coordinator = ElgatoDataUpdateCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() @@ -22,6 +19,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ElgatorConfigEntry) -> b return True -async def async_unload_entry(hass: HomeAssistant, 
entry: ElgatorConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: ElgatoConfigEntry) -> bool: """Unload Elgato Light config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/elgato/button.py b/homeassistant/components/elgato/button.py index 6f9436b8e29..505eff36b44 100644 --- a/homeassistant/components/elgato/button.py +++ b/homeassistant/components/elgato/button.py @@ -18,8 +18,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ElgatorConfigEntry -from .coordinator import ElgatoDataUpdateCoordinator +from .coordinator import ElgatoConfigEntry, ElgatoDataUpdateCoordinator from .entity import ElgatoEntity PARALLEL_UPDATES = 1 @@ -50,7 +49,7 @@ BUTTONS = [ async def async_setup_entry( hass: HomeAssistant, - entry: ElgatorConfigEntry, + entry: ElgatoConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Elgato button based on a config entry.""" diff --git a/homeassistant/components/elgato/coordinator.py b/homeassistant/components/elgato/coordinator.py index f3cf9216374..5e1ba0a6494 100644 --- a/homeassistant/components/elgato/coordinator.py +++ b/homeassistant/components/elgato/coordinator.py @@ -12,6 +12,8 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda from .const import DOMAIN, LOGGER, SCAN_INTERVAL +type ElgatoConfigEntry = ConfigEntry[ElgatoDataUpdateCoordinator] + @dataclass class ElgatoData: @@ -26,10 +28,10 @@ class ElgatoData: class ElgatoDataUpdateCoordinator(DataUpdateCoordinator[ElgatoData]): """Class to manage fetching Elgato data.""" - config_entry: ConfigEntry + config_entry: ElgatoConfigEntry has_battery: bool | None = None - def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: + def __init__(self, hass: HomeAssistant, entry: ElgatoConfigEntry) 
-> None: """Initialize the coordinator.""" self.config_entry = entry self.client = Elgato( @@ -39,6 +41,7 @@ class ElgatoDataUpdateCoordinator(DataUpdateCoordinator[ElgatoData]): super().__init__( hass, LOGGER, + config_entry=entry, name=f"{DOMAIN}_{entry.data[CONF_HOST]}", update_interval=SCAN_INTERVAL, ) diff --git a/homeassistant/components/elgato/diagnostics.py b/homeassistant/components/elgato/diagnostics.py index ac3ea0a155d..4e1b9d4cfdd 100644 --- a/homeassistant/components/elgato/diagnostics.py +++ b/homeassistant/components/elgato/diagnostics.py @@ -6,11 +6,11 @@ from typing import Any from homeassistant.core import HomeAssistant -from . import ElgatorConfigEntry +from .coordinator import ElgatoConfigEntry async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ElgatorConfigEntry + hass: HomeAssistant, entry: ElgatoConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" coordinator = entry.runtime_data diff --git a/homeassistant/components/elgato/light.py b/homeassistant/components/elgato/light.py index 9a85c572e2c..990a0606fce 100644 --- a/homeassistant/components/elgato/light.py +++ b/homeassistant/components/elgato/light.py @@ -21,9 +21,8 @@ from homeassistant.helpers.entity_platform import ( ) from homeassistant.util import color as color_util -from . 
import ElgatorConfigEntry from .const import SERVICE_IDENTIFY -from .coordinator import ElgatoDataUpdateCoordinator +from .coordinator import ElgatoConfigEntry, ElgatoDataUpdateCoordinator from .entity import ElgatoEntity PARALLEL_UPDATES = 1 @@ -31,7 +30,7 @@ PARALLEL_UPDATES = 1 async def async_setup_entry( hass: HomeAssistant, - entry: ElgatorConfigEntry, + entry: ElgatoConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Elgato Light based on a config entry.""" diff --git a/homeassistant/components/elgato/sensor.py b/homeassistant/components/elgato/sensor.py index a28ee01f505..529d2f7c76e 100644 --- a/homeassistant/components/elgato/sensor.py +++ b/homeassistant/components/elgato/sensor.py @@ -21,8 +21,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import ElgatorConfigEntry -from .coordinator import ElgatoData, ElgatoDataUpdateCoordinator +from .coordinator import ElgatoConfigEntry, ElgatoData, ElgatoDataUpdateCoordinator from .entity import ElgatoEntity # Coordinator is used to centralize the data updates @@ -104,7 +103,7 @@ SENSORS = [ async def async_setup_entry( hass: HomeAssistant, - entry: ElgatorConfigEntry, + entry: ElgatoConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Elgato sensor based on a config entry.""" diff --git a/homeassistant/components/elgato/switch.py b/homeassistant/components/elgato/switch.py index 643f148ec7d..3b2420b0ace 100644 --- a/homeassistant/components/elgato/switch.py +++ b/homeassistant/components/elgato/switch.py @@ -14,8 +14,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import ElgatorConfigEntry -from .coordinator import ElgatoData, ElgatoDataUpdateCoordinator +from .coordinator import ElgatoConfigEntry, ElgatoData, ElgatoDataUpdateCoordinator from .entity import ElgatoEntity PARALLEL_UPDATES = 1 @@ -54,7 +53,7 @@ SWITCHES = [ async def async_setup_entry( hass: HomeAssistant, - entry: ElgatorConfigEntry, + entry: ElgatoConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Elgato switches based on a config entry.""" From bcaf1dc20b5035564b0d0e2815bff77e094238e6 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 14:24:38 +0100 Subject: [PATCH 115/677] Clean up Elgato config flow tests (#133045) --- .../elgato/snapshots/test_config_flow.ambr | 128 ------------------ tests/components/elgato/test_config_flow.py | 94 +++++++++---- 2 files changed, 65 insertions(+), 157 deletions(-) delete mode 100644 tests/components/elgato/snapshots/test_config_flow.ambr diff --git a/tests/components/elgato/snapshots/test_config_flow.ambr b/tests/components/elgato/snapshots/test_config_flow.ambr deleted file mode 100644 index 522482ab602..00000000000 --- a/tests/components/elgato/snapshots/test_config_flow.ambr +++ /dev/null @@ -1,128 +0,0 @@ -# serializer version: 1 -# name: test_full_user_flow_implementation - FlowResultSnapshot({ - 'context': dict({ - 'source': 'user', - 'unique_id': 'CN11A1A00001', - }), - 'data': dict({ - 'host': '127.0.0.1', - 'mac': None, - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'elgato', - 'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'host': '127.0.0.1', - 'mac': None, - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'elgato', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'CN11A1A00001', - 'unique_id': 'CN11A1A00001', - 'version': 1, - }), - 
'title': 'CN11A1A00001', - 'type': , - 'version': 1, - }) -# --- -# name: test_full_zeroconf_flow_implementation - FlowResultSnapshot({ - 'context': dict({ - 'confirm_only': True, - 'source': 'zeroconf', - 'unique_id': 'CN11A1A00001', - }), - 'data': dict({ - 'host': '127.0.0.1', - 'mac': 'AA:BB:CC:DD:EE:FF', - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'elgato', - 'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'host': '127.0.0.1', - 'mac': 'AA:BB:CC:DD:EE:FF', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'elgato', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'zeroconf', - 'title': 'CN11A1A00001', - 'unique_id': 'CN11A1A00001', - 'version': 1, - }), - 'title': 'CN11A1A00001', - 'type': , - 'version': 1, - }) -# --- -# name: test_zeroconf_during_onboarding - FlowResultSnapshot({ - 'context': dict({ - 'source': 'zeroconf', - 'unique_id': 'CN11A1A00001', - }), - 'data': dict({ - 'host': '127.0.0.1', - 'mac': 'AA:BB:CC:DD:EE:FF', - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'elgato', - 'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'host': '127.0.0.1', - 'mac': 'AA:BB:CC:DD:EE:FF', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'elgato', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'zeroconf', - 'title': 'CN11A1A00001', - 'unique_id': 'CN11A1A00001', - 'version': 1, - }), - 'title': 'CN11A1A00001', - 'type': , - 'version': 1, - }) -# --- diff --git a/tests/components/elgato/test_config_flow.py b/tests/components/elgato/test_config_flow.py index 42abc0cde63..00763f60458 100644 --- 
a/tests/components/elgato/test_config_flow.py +++ b/tests/components/elgato/test_config_flow.py @@ -5,12 +5,11 @@ from unittest.mock import AsyncMock, MagicMock from elgato import ElgatoConnectionError import pytest -from syrupy.assertion import SnapshotAssertion from homeassistant.components import zeroconf from homeassistant.components.elgato.const import DOMAIN from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF -from homeassistant.const import CONF_HOST, CONF_SOURCE +from homeassistant.const import CONF_HOST, CONF_MAC, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -21,7 +20,6 @@ async def test_full_user_flow_implementation( hass: HomeAssistant, mock_elgato: MagicMock, mock_setup_entry: AsyncMock, - snapshot: SnapshotAssertion, ) -> None: """Test the full manual user flow from start to finish.""" result = await hass.config_entries.flow.async_init( @@ -29,15 +27,22 @@ async def test_full_user_flow_implementation( context={"source": SOURCE_USER}, ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_HOST: "127.0.0.1"} ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2 == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "CN11A1A00001" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_MAC: None, + } + assert not config_entry.options assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_elgato.info.mock_calls) == 1 @@ -47,7 +52,6 @@ async def test_full_zeroconf_flow_implementation( hass: HomeAssistant, mock_elgato: MagicMock, mock_setup_entry: AsyncMock, - snapshot: 
SnapshotAssertion, ) -> None: """Test the zeroconf flow from start to finish.""" result = await hass.config_entries.flow.async_init( @@ -64,9 +68,9 @@ async def test_full_zeroconf_flow_implementation( ), ) - assert result.get("description_placeholders") == {"serial_number": "CN11A1A00001"} - assert result.get("step_id") == "zeroconf_confirm" - assert result.get("type") is FlowResultType.FORM + assert result["description_placeholders"] == {"serial_number": "CN11A1A00001"} + assert result["step_id"] == "zeroconf_confirm" + assert result["type"] is FlowResultType.FORM progress = hass.config_entries.flow.async_progress() assert len(progress) == 1 @@ -74,12 +78,19 @@ async def test_full_zeroconf_flow_implementation( assert "context" in progress[0] assert progress[0]["context"].get("confirm_only") is True - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2 == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "CN11A1A00001" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_MAC: "AA:BB:CC:DD:EE:FF", + } + assert not config_entry.options assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_elgato.info.mock_calls) == 1 @@ -97,9 +108,28 @@ async def test_connection_error( data={CONF_HOST: "127.0.0.1"}, ) - assert result.get("type") is FlowResultType.FORM - assert result.get("errors") == {"base": "cannot_connect"} - assert result.get("step_id") == "user" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + assert result["step_id"] == "user" + + # Recover from error + mock_elgato.info.side_effect = None + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_HOST: "127.0.0.2"}, + ) + + 
assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "CN11A1A00001" + assert config_entry.data == { + CONF_HOST: "127.0.0.2", + CONF_MAC: None, + } + assert not config_entry.options async def test_zeroconf_connection_error( @@ -122,8 +152,8 @@ async def test_zeroconf_connection_error( ), ) - assert result.get("reason") == "cannot_connect" - assert result.get("type") is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + assert result["type"] is FlowResultType.ABORT @pytest.mark.usefixtures("mock_elgato") @@ -138,8 +168,8 @@ async def test_user_device_exists_abort( data={CONF_HOST: "127.0.0.1"}, ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" @pytest.mark.usefixtures("mock_elgato") @@ -162,8 +192,8 @@ async def test_zeroconf_device_exists_abort( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" entries = hass.config_entries.async_entries(DOMAIN) assert entries[0].data[CONF_HOST] == "127.0.0.1" @@ -183,8 +213,8 @@ async def test_zeroconf_device_exists_abort( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" entries = hass.config_entries.async_entries(DOMAIN) assert entries[0].data[CONF_HOST] == "127.0.0.2" @@ -195,7 +225,6 @@ async def test_zeroconf_during_onboarding( mock_elgato: MagicMock, mock_setup_entry: AsyncMock, mock_onboarding: MagicMock, - snapshot: SnapshotAssertion, ) -> None: """Test the zeroconf creates an entry during onboarding.""" result = await hass.config_entries.flow.async_init( @@ -212,8 
+241,15 @@ async def test_zeroconf_during_onboarding( ), ) - assert result.get("type") is FlowResultType.CREATE_ENTRY - assert result == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "CN11A1A00001" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_MAC: "AA:BB:CC:DD:EE:FF", + } + assert not config_entry.options assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_elgato.info.mock_calls) == 1 From c18cbf5994d6b22504e19d5d698d80f806137fc6 Mon Sep 17 00:00:00 2001 From: Krisjanis Lejejs Date: Thu, 12 Dec 2024 13:25:54 +0000 Subject: [PATCH 116/677] Bump hass-nabucasa from 0.86.0 to 0.87.0 (#133043) --- homeassistant/components/cloud/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/cloud/manifest.json b/homeassistant/components/cloud/manifest.json index 48f2153e86f..7ee8cf46b86 100644 --- a/homeassistant/components/cloud/manifest.json +++ b/homeassistant/components/cloud/manifest.json @@ -13,6 +13,6 @@ "integration_type": "system", "iot_class": "cloud_push", "loggers": ["hass_nabucasa"], - "requirements": ["hass-nabucasa==0.86.0"], + "requirements": ["hass-nabucasa==0.87.0"], "single_config_entry": true } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index e4abf3ab678..e7d46787f5d 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ fnv-hash-fast==1.0.2 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 habluetooth==3.6.0 -hass-nabucasa==0.86.0 +hass-nabucasa==0.87.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 home-assistant-frontend==20241127.7 diff --git a/pyproject.toml b/pyproject.toml index c40f8bd0d01..375e57126f2 100644 --- a/pyproject.toml +++ 
b/pyproject.toml @@ -45,7 +45,7 @@ dependencies = [ "fnv-hash-fast==1.0.2", # hass-nabucasa is imported by helpers which don't depend on the cloud # integration - "hass-nabucasa==0.86.0", + "hass-nabucasa==0.87.0", # When bumping httpx, please check the version pins of # httpcore, anyio, and h11 in gen_requirements_all "httpx==0.27.2", diff --git a/requirements.txt b/requirements.txt index 9ef9f0e44f2..e43822553f3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -19,7 +19,7 @@ bcrypt==4.2.0 certifi>=2021.5.30 ciso8601==2.3.2 fnv-hash-fast==1.0.2 -hass-nabucasa==0.86.0 +hass-nabucasa==0.87.0 httpx==0.27.2 home-assistant-bluetooth==1.13.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 26acf53fa53..fb873805873 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1088,7 +1088,7 @@ habitipy==0.3.3 habluetooth==3.6.0 # homeassistant.components.cloud -hass-nabucasa==0.86.0 +hass-nabucasa==0.87.0 # homeassistant.components.splunk hass-splunk==0.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index afe7252f9f8..83e7c89dd8b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -926,7 +926,7 @@ habitipy==0.3.3 habluetooth==3.6.0 # homeassistant.components.cloud -hass-nabucasa==0.86.0 +hass-nabucasa==0.87.0 # homeassistant.components.conversation hassil==2.0.5 From 2e133df549a3bc4fa67375882eb5824d6f6abe0b Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Thu, 12 Dec 2024 14:26:17 +0100 Subject: [PATCH 117/677] Improve husqvarna_automower decorator typing (#133047) --- .../components/husqvarna_automower/entity.py | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/husqvarna_automower/entity.py b/homeassistant/components/husqvarna_automower/entity.py index fef0ba03b62..5b5156e5f1d 100644 --- a/homeassistant/components/husqvarna_automower/entity.py +++ 
b/homeassistant/components/husqvarna_automower/entity.py @@ -1,10 +1,12 @@ """Platform for Husqvarna Automower base entity.""" +from __future__ import annotations + import asyncio -from collections.abc import Awaitable, Callable, Coroutine +from collections.abc import Callable, Coroutine import functools import logging -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Concatenate from aioautomower.exceptions import ApiException from aioautomower.model import MowerActivities, MowerAttributes, MowerStates, WorkArea @@ -52,18 +54,17 @@ def _work_area_translation_key(work_area_id: int, key: str) -> str: return f"work_area_{key}" -def handle_sending_exception( +type _FuncType[_T, **_P, _R] = Callable[Concatenate[_T, _P], Coroutine[Any, Any, _R]] + + +def handle_sending_exception[_Entity: AutomowerBaseEntity, **_P]( poll_after_sending: bool = False, -) -> Callable[ - [Callable[..., Awaitable[Any]]], Callable[..., Coroutine[Any, Any, None]] -]: +) -> Callable[[_FuncType[_Entity, _P, Any]], _FuncType[_Entity, _P, None]]: """Handle exceptions while sending a command and optionally refresh coordinator.""" - def decorator( - func: Callable[..., Awaitable[Any]], - ) -> Callable[..., Coroutine[Any, Any, None]]: + def decorator(func: _FuncType[_Entity, _P, Any]) -> _FuncType[_Entity, _P, None]: @functools.wraps(func) - async def wrapper(self: Any, *args: Any, **kwargs: Any) -> Any: + async def wrapper(self: _Entity, *args: _P.args, **kwargs: _P.kwargs) -> None: try: await func(self, *args, **kwargs) except ApiException as exception: From 8e15287662fa70bc9eb76dad2326d2a6ace1d8f5 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 14:26:34 +0100 Subject: [PATCH 118/677] Add data descriptions to Twente Milieu config flow (#133046) --- homeassistant/components/twentemilieu/quality_scale.yaml | 5 +---- homeassistant/components/twentemilieu/strings.json | 5 +++++ 2 files changed, 6 insertions(+), 4 deletions(-) diff --git 
a/homeassistant/components/twentemilieu/quality_scale.yaml b/homeassistant/components/twentemilieu/quality_scale.yaml index 210416e56c5..3d7535a249c 100644 --- a/homeassistant/components/twentemilieu/quality_scale.yaml +++ b/homeassistant/components/twentemilieu/quality_scale.yaml @@ -8,10 +8,7 @@ rules: brands: done common-modules: done config-flow-test-coverage: done - config-flow: - status: todo - comment: | - data_description's are missing. + config-flow: done dependency-transparency: done docs-actions: status: exempt diff --git a/homeassistant/components/twentemilieu/strings.json b/homeassistant/components/twentemilieu/strings.json index 7797167ea0b..5c40df1b0c2 100644 --- a/homeassistant/components/twentemilieu/strings.json +++ b/homeassistant/components/twentemilieu/strings.json @@ -7,6 +7,11 @@ "post_code": "Postal code", "house_number": "House number", "house_letter": "House letter/additional" + }, + "data_description": { + "post_code": "The postal code of the address, for example 7500AA", + "house_number": "The house number of the address", + "house_letter": "The house letter or additional information of the address" } } }, From 4b5d717898c32712689d2534e33d9c2e79d90579 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Thu, 12 Dec 2024 14:35:11 +0100 Subject: [PATCH 119/677] Fix music_assistant decorator typing (#133044) --- .../components/music_assistant/media_player.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/music_assistant/media_player.py b/homeassistant/components/music_assistant/media_player.py index 847a71b0061..7d09bd5b888 100644 --- a/homeassistant/components/music_assistant/media_player.py +++ b/homeassistant/components/music_assistant/media_player.py @@ -3,11 +3,11 @@ from __future__ import annotations import asyncio -from collections.abc import Awaitable, Callable, Coroutine, Mapping +from collections.abc import Callable, Coroutine, Mapping 
from contextlib import suppress import functools import os -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Concatenate from music_assistant_models.enums import ( EventType, @@ -102,14 +102,14 @@ ATTR_AUTO_PLAY = "auto_play" def catch_musicassistant_error[_R, **P]( - func: Callable[..., Awaitable[_R]], -) -> Callable[..., Coroutine[Any, Any, _R | None]]: + func: Callable[Concatenate[MusicAssistantPlayer, P], Coroutine[Any, Any, _R]], +) -> Callable[Concatenate[MusicAssistantPlayer, P], Coroutine[Any, Any, _R]]: """Check and log commands to players.""" @functools.wraps(func) async def wrapper( self: MusicAssistantPlayer, *args: P.args, **kwargs: P.kwargs - ) -> _R | None: + ) -> _R: """Catch Music Assistant errors and convert to Home Assistant error.""" try: return await func(self, *args, **kwargs) From dc18e62e1e5c18a52678f518c09f7d27378191b5 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Thu, 12 Dec 2024 14:38:55 +0100 Subject: [PATCH 120/677] Bump ruff to 0.8.2 (#133041) --- .pre-commit-config.yaml | 2 +- requirements_test_pre_commit.txt | 2 +- script/hassfest/docker/Dockerfile | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9947ee05ad1..5d65225f512 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.1 + rev: v0.8.2 hooks: - id: ruff args: diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index b263373f11d..aa04dbeb6d0 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.8.1 +ruff==0.8.2 yamllint==1.35.1 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index 98edb9c458f..afedbd23cfe 100644 
--- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -22,7 +22,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.4,source=/uv,target=/bin/uv \ --no-cache \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ - stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.1 \ + stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.2 \ PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.9 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" From f05d18ea70cd2581d5ca317e50ccda7f5ad283f1 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 14:42:05 +0100 Subject: [PATCH 121/677] Small test improvements to Tailwind tests (#133051) --- .../tailwind/snapshots/test_config_flow.ambr | 89 ------------- tests/components/tailwind/test_config_flow.py | 125 +++++++++++------- tests/components/tailwind/test_init.py | 4 +- 3 files changed, 78 insertions(+), 140 deletions(-) delete mode 100644 tests/components/tailwind/snapshots/test_config_flow.ambr diff --git a/tests/components/tailwind/snapshots/test_config_flow.ambr b/tests/components/tailwind/snapshots/test_config_flow.ambr deleted file mode 100644 index 09bf25cb96e..00000000000 --- a/tests/components/tailwind/snapshots/test_config_flow.ambr +++ /dev/null @@ -1,89 +0,0 @@ -# serializer version: 1 -# name: test_user_flow - FlowResultSnapshot({ - 'context': dict({ - 'source': 'user', - 'unique_id': '3c:e9:0e:6d:21:84', - }), - 'data': dict({ - 'host': '127.0.0.1', - 'token': '987654', - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'tailwind', - 'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'host': '127.0.0.1', - 'token': '987654', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'tailwind', - 'entry_id': , - 
'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'Tailwind iQ3', - 'unique_id': '3c:e9:0e:6d:21:84', - 'version': 1, - }), - 'title': 'Tailwind iQ3', - 'type': , - 'version': 1, - }) -# --- -# name: test_zeroconf_flow - FlowResultSnapshot({ - 'context': dict({ - 'configuration_url': 'https://web.gotailwind.com/client/integration/local-control-key', - 'source': 'zeroconf', - 'title_placeholders': dict({ - 'name': 'Tailwind iQ3', - }), - 'unique_id': '3c:e9:0e:6d:21:84', - }), - 'data': dict({ - 'host': '127.0.0.1', - 'token': '987654', - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'tailwind', - 'minor_version': 1, - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'host': '127.0.0.1', - 'token': '987654', - }), - 'disabled_by': None, - 'discovery_keys': dict({ - }), - 'domain': 'tailwind', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'zeroconf', - 'title': 'Tailwind iQ3', - 'unique_id': '3c:e9:0e:6d:21:84', - 'version': 1, - }), - 'title': 'Tailwind iQ3', - 'type': , - 'version': 1, - }) -# --- diff --git a/tests/components/tailwind/test_config_flow.py b/tests/components/tailwind/test_config_flow.py index d2d15172718..ca6fbacf0fc 100644 --- a/tests/components/tailwind/test_config_flow.py +++ b/tests/components/tailwind/test_config_flow.py @@ -25,20 +25,17 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.usefixtures("mock_tailwind") -async def test_user_flow( - hass: HomeAssistant, - snapshot: SnapshotAssertion, -) -> None: +async def test_user_flow(hass: HomeAssistant) -> None: """Test the full happy path user flow from start to finish.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, ) - assert result.get("type") is 
FlowResultType.FORM - assert result.get("step_id") == "user" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_HOST: "127.0.0.1", @@ -46,8 +43,15 @@ async def test_user_flow( }, ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2 == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "3c:e9:0e:6d:21:84" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_TOKEN: "987654", + } + assert not config_entry.options @pytest.mark.parametrize( @@ -76,19 +80,27 @@ async def test_user_flow_errors( }, ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" - assert result.get("errors") == expected_error + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == expected_error mock_tailwind.status.side_effect = None - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_HOST: "127.0.0.2", CONF_TOKEN: "123456", }, ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "3c:e9:0e:6d:21:84" + assert config_entry.data == { + CONF_HOST: "127.0.0.2", + CONF_TOKEN: "123456", + } + assert not config_entry.options async def test_user_flow_unsupported_firmware_version( @@ -105,8 +117,8 @@ async def test_user_flow_unsupported_firmware_version( }, ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "unsupported_firmware" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unsupported_firmware" 
@pytest.mark.usefixtures("mock_tailwind") @@ -129,8 +141,8 @@ async def test_user_flow_already_configured( }, ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" assert mock_config_entry.data[CONF_TOKEN] == "987654" @@ -160,19 +172,26 @@ async def test_zeroconf_flow( ), ) - assert result.get("step_id") == "zeroconf_confirm" - assert result.get("type") is FlowResultType.FORM + assert result["step_id"] == "zeroconf_confirm" + assert result["type"] is FlowResultType.FORM progress = hass.config_entries.flow.async_progress() assert len(progress) == 1 assert progress[0].get("flow_id") == result["flow_id"] - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_TOKEN: "987654"} ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2 == snapshot + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "3c:e9:0e:6d:21:84" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_TOKEN: "987654", + } + assert not config_entry.options @pytest.mark.parametrize( @@ -200,8 +219,8 @@ async def test_zeroconf_flow_abort_incompatible_properties( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == expected_reason + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == expected_reason @pytest.mark.parametrize( @@ -240,25 +259,33 @@ async def test_zeroconf_flow_errors( ), ) - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "123456", }, ) - assert result2.get("type") is FlowResultType.FORM - assert 
result2.get("step_id") == "zeroconf_confirm" - assert result2.get("errors") == expected_error + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "zeroconf_confirm" + assert result["errors"] == expected_error mock_tailwind.status.side_effect = None - result3 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "123456", }, ) - assert result3.get("type") is FlowResultType.CREATE_ENTRY + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "3c:e9:0e:6d:21:84" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_TOKEN: "123456", + } + assert not config_entry.options @pytest.mark.usefixtures("mock_tailwind") @@ -292,8 +319,8 @@ async def test_zeroconf_flow_not_discovered_again( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" @@ -307,17 +334,17 @@ async def test_reauth_flow( assert mock_config_entry.data[CONF_TOKEN] == "123456" result = await mock_config_entry.start_reauth_flow(hass) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "reauth_confirm" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_TOKEN: "987654"}, ) await hass.async_block_till_done() - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" assert mock_config_entry.data[CONF_TOKEN] == "987654" @@ -343,27 +370,27 @@ async 
def test_reauth_flow_errors( result = await mock_config_entry.start_reauth_flow(hass) - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "123456", }, ) - assert result2.get("type") is FlowResultType.FORM - assert result2.get("step_id") == "reauth_confirm" - assert result2.get("errors") == expected_error + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == expected_error mock_tailwind.status.side_effect = None - result3 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_TOKEN: "123456", }, ) - assert result3.get("type") is FlowResultType.ABORT - assert result3.get("reason") == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" async def test_dhcp_discovery_updates_entry( @@ -384,8 +411,8 @@ async def test_dhcp_discovery_updates_entry( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" @@ -404,5 +431,5 @@ async def test_dhcp_discovery_ignores_unknown(hass: HomeAssistant) -> None: ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "unknown" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unknown" diff --git a/tests/components/tailwind/test_init.py b/tests/components/tailwind/test_init.py index 8ea5f1108f4..8e075a26279 100644 --- a/tests/components/tailwind/test_init.py +++ b/tests/components/tailwind/test_init.py @@ -66,8 +66,8 @@ async def test_config_entry_authentication_failed( assert len(flows) == 1 flow = flows[0] - assert flow.get("step_id") == 
"reauth_confirm" - assert flow.get("handler") == DOMAIN + assert flow["step_id"] == "reauth_confirm" + assert flow["handler"] == DOMAIN assert "context" in flow assert flow["context"].get("source") == SOURCE_REAUTH From 006b3b0e2235e397262cbcc6dcacea2a79bca44b Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 12 Dec 2024 14:51:15 +0100 Subject: [PATCH 122/677] Bump uv to 0.5.8 (#133036) --- Dockerfile | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- script/hassfest/docker/Dockerfile | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Dockerfile b/Dockerfile index 61d64212b40..630fc19496c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,7 +13,7 @@ ENV \ ARG QEMU_CPU # Install uv -RUN pip3 install uv==0.5.4 +RUN pip3 install uv==0.5.8 WORKDIR /usr/src diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index e7d46787f5d..b2dd0cf251c 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -65,7 +65,7 @@ standard-telnetlib==3.13.0;python_version>='3.13' typing-extensions>=4.12.2,<5.0 ulid-transform==1.0.2 urllib3>=1.26.5,<2 -uv==0.5.4 +uv==0.5.8 voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 voluptuous==0.15.2 diff --git a/pyproject.toml b/pyproject.toml index 375e57126f2..2930d381d2a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -75,7 +75,7 @@ dependencies = [ # Temporary setting an upper bound, to prevent compat issues with urllib3>=2 # https://github.com/home-assistant/core/issues/97248 "urllib3>=1.26.5,<2", - "uv==0.5.4", + "uv==0.5.8", "voluptuous==0.15.2", "voluptuous-serialize==2.6.0", "voluptuous-openapi==0.0.5", diff --git a/requirements.txt b/requirements.txt index e43822553f3..e80804569d3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -43,7 +43,7 @@ standard-telnetlib==3.13.0;python_version>='3.13' typing-extensions>=4.12.2,<5.0 ulid-transform==1.0.2 urllib3>=1.26.5,<2 -uv==0.5.4 
+uv==0.5.8 voluptuous==0.15.2 voluptuous-serialize==2.6.0 voluptuous-openapi==0.0.5 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index afedbd23cfe..a4f33c3ad40 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -14,7 +14,7 @@ WORKDIR "/github/workspace" COPY . /usr/src/homeassistant # Uv is only needed during build -RUN --mount=from=ghcr.io/astral-sh/uv:0.5.4,source=/uv,target=/bin/uv \ +RUN --mount=from=ghcr.io/astral-sh/uv:0.5.8,source=/uv,target=/bin/uv \ # Required for PyTurboJPEG apk add --no-cache libturbojpeg \ && uv pip install \ From 6d042d987fbe2634bbb56c33f83d8dcf5dcab6bf Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 15:11:13 +0100 Subject: [PATCH 123/677] Migrate emulated_hue light tests to use Kelvin (#133006) --- tests/components/emulated_hue/test_hue_api.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/tests/components/emulated_hue/test_hue_api.py b/tests/components/emulated_hue/test_hue_api.py index a445f8bae0d..8a340d5e2dd 100644 --- a/tests/components/emulated_hue/test_hue_api.py +++ b/tests/components/emulated_hue/test_hue_api.py @@ -793,7 +793,10 @@ async def test_put_light_state( await hass_hue.services.async_call( light.DOMAIN, const.SERVICE_TURN_ON, - {const.ATTR_ENTITY_ID: "light.ceiling_lights", light.ATTR_COLOR_TEMP: 20}, + { + const.ATTR_ENTITY_ID: "light.ceiling_lights", + light.ATTR_COLOR_TEMP_KELVIN: 50000, + }, blocking=True, ) @@ -802,8 +805,10 @@ async def test_put_light_state( ) assert ( - hass_hue.states.get("light.ceiling_lights").attributes[light.ATTR_COLOR_TEMP] - == 50 + hass_hue.states.get("light.ceiling_lights").attributes[ + light.ATTR_COLOR_TEMP_KELVIN + ] + == 20000 ) # mock light.turn_on call @@ -1785,7 +1790,7 @@ async def test_get_light_state_when_none( light.ATTR_BRIGHTNESS: None, light.ATTR_RGB_COLOR: None, light.ATTR_HS_COLOR: None, - 
light.ATTR_COLOR_TEMP: None, + light.ATTR_COLOR_TEMP_KELVIN: None, light.ATTR_XY_COLOR: None, light.ATTR_SUPPORTED_COLOR_MODES: [ light.COLOR_MODE_COLOR_TEMP, @@ -1813,7 +1818,7 @@ async def test_get_light_state_when_none( light.ATTR_BRIGHTNESS: None, light.ATTR_RGB_COLOR: None, light.ATTR_HS_COLOR: None, - light.ATTR_COLOR_TEMP: None, + light.ATTR_COLOR_TEMP_KELVIN: None, light.ATTR_XY_COLOR: None, light.ATTR_SUPPORTED_COLOR_MODES: [ light.COLOR_MODE_COLOR_TEMP, From 37f2bde6f54bd65245c109c4c1e37cba8cc7ce45 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 15:11:34 +0100 Subject: [PATCH 124/677] Migrate esphome light tests to use Kelvin (#133008) --- tests/components/esphome/test_light.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/tests/components/esphome/test_light.py b/tests/components/esphome/test_light.py index 7f275fff4f2..8e4f37079d1 100644 --- a/tests/components/esphome/test_light.py +++ b/tests/components/esphome/test_light.py @@ -20,9 +20,7 @@ from homeassistant.components.light import ( ATTR_FLASH, ATTR_HS_COLOR, ATTR_MAX_COLOR_TEMP_KELVIN, - ATTR_MAX_MIREDS, ATTR_MIN_COLOR_TEMP_KELVIN, - ATTR_MIN_MIREDS, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -1379,9 +1377,6 @@ async def test_light_color_temp( assert state.state == STATE_ON attributes = state.attributes - assert attributes[ATTR_MIN_MIREDS] == 153 - assert attributes[ATTR_MAX_MIREDS] == 370 - assert attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 2700 assert attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 6500 await hass.services.async_call( @@ -1454,9 +1449,6 @@ async def test_light_color_temp_no_mireds_set( assert state.state == STATE_ON attributes = state.attributes - assert attributes[ATTR_MIN_MIREDS] is None - assert attributes[ATTR_MAX_MIREDS] is None - assert attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 0 assert attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 0 await hass.services.async_call( @@ -1558,8 +1550,6 @@ async def 
test_light_color_temp_legacy( assert attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.COLOR_TEMP] - assert attributes[ATTR_MIN_MIREDS] == 153 - assert attributes[ATTR_MAX_MIREDS] == 370 assert attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 2700 assert attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 6500 From 839312c65ce4e98024ad60ea3adabb96b0d5e9de Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 15:11:52 +0100 Subject: [PATCH 125/677] Migrate homekit light tests to use Kelvin (#133011) --- tests/components/homekit/test_type_lights.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/components/homekit/test_type_lights.py b/tests/components/homekit/test_type_lights.py index a45e4988c36..fb059b93a13 100644 --- a/tests/components/homekit/test_type_lights.py +++ b/tests/components/homekit/test_type_lights.py @@ -20,8 +20,8 @@ from homeassistant.components.light import ( ATTR_COLOR_MODE, ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, - ATTR_MAX_MIREDS, - ATTR_MIN_MIREDS, + ATTR_MAX_COLOR_TEMP_KELVIN, + ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -1391,8 +1391,8 @@ async def test_light_min_max_mireds(hass: HomeAssistant, hk_driver) -> None: { ATTR_SUPPORTED_COLOR_MODES: [ColorMode.COLOR_TEMP], ATTR_BRIGHTNESS: 255, - ATTR_MAX_MIREDS: 500.5, - ATTR_MIN_MIREDS: 153.5, + ATTR_MIN_COLOR_TEMP_KELVIN: 1999, + ATTR_MAX_COLOR_TEMP_KELVIN: 6499, }, ) await hass.async_block_till_done() From 0a748252e757f423fb5511dbfa7d8f8e9d734311 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Thu, 12 Dec 2024 15:14:28 +0100 Subject: [PATCH 126/677] Improve Callable annotations (#133050) --- homeassistant/components/crownstone/config_flow.py | 2 +- homeassistant/components/dsmr/sensor.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/homeassistant/components/crownstone/config_flow.py b/homeassistant/components/crownstone/config_flow.py index bf6e9204714..2a96098421a 100644 --- a/homeassistant/components/crownstone/config_flow.py +++ b/homeassistant/components/crownstone/config_flow.py @@ -49,7 +49,7 @@ class BaseCrownstoneFlowHandler(ConfigEntryBaseFlow): cloud: CrownstoneCloud def __init__( - self, flow_type: str, create_entry_cb: Callable[..., ConfigFlowResult] + self, flow_type: str, create_entry_cb: Callable[[], ConfigFlowResult] ) -> None: """Set up flow instance.""" self.flow_type = flow_type diff --git a/homeassistant/components/dsmr/sensor.py b/homeassistant/components/dsmr/sensor.py index a069c32be04..213e948bafb 100644 --- a/homeassistant/components/dsmr/sensor.py +++ b/homeassistant/components/dsmr/sensor.py @@ -549,7 +549,7 @@ async def async_setup_entry( dsmr_version = entry.data[CONF_DSMR_VERSION] entities: list[DSMREntity] = [] initialized: bool = False - add_entities_handler: Callable[..., None] | None + add_entities_handler: Callable[[], None] | None @callback def init_async_add_entities(telegram: Telegram) -> None: From 5c6e4ad191c755315de87a77af05d61655f3929a Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Thu, 12 Dec 2024 16:01:57 +0100 Subject: [PATCH 127/677] Use PEP 695 TypeVar syntax (#133049) --- homeassistant/components/motionblinds_ble/sensor.py | 7 ++----- homeassistant/components/powerfox/sensor.py | 7 +++---- homeassistant/components/powerwall/sensor.py | 12 +++++------- homeassistant/helpers/event.py | 7 +++---- 4 files changed, 13 insertions(+), 20 deletions(-) diff --git a/homeassistant/components/motionblinds_ble/sensor.py b/homeassistant/components/motionblinds_ble/sensor.py index aa0f5ef7c90..740a0509a9e 100644 --- a/homeassistant/components/motionblinds_ble/sensor.py +++ b/homeassistant/components/motionblinds_ble/sensor.py @@ -6,7 +6,6 @@ from collections.abc import Callable from dataclasses import dataclass 
import logging from math import ceil -from typing import Generic, TypeVar from motionblindsble.const import ( MotionBlindType, @@ -45,11 +44,9 @@ _LOGGER = logging.getLogger(__name__) PARALLEL_UPDATES = 0 -_T = TypeVar("_T") - @dataclass(frozen=True, kw_only=True) -class MotionblindsBLESensorEntityDescription(SensorEntityDescription, Generic[_T]): +class MotionblindsBLESensorEntityDescription[_T](SensorEntityDescription): """Entity description of a sensor entity with initial_value attribute.""" initial_value: str | None = None @@ -110,7 +107,7 @@ async def async_setup_entry( async_add_entities(entities) -class MotionblindsBLESensorEntity(MotionblindsBLEEntity, SensorEntity, Generic[_T]): +class MotionblindsBLESensorEntity[_T](MotionblindsBLEEntity, SensorEntity): """Representation of a sensor entity.""" entity_description: MotionblindsBLESensorEntityDescription[_T] diff --git a/homeassistant/components/powerfox/sensor.py b/homeassistant/components/powerfox/sensor.py index af6f0301b0c..7771f96dd81 100644 --- a/homeassistant/components/powerfox/sensor.py +++ b/homeassistant/components/powerfox/sensor.py @@ -4,7 +4,6 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from typing import Generic, TypeVar from powerfox import Device, PowerMeter, WaterMeter @@ -22,11 +21,11 @@ from . 
import PowerfoxConfigEntry from .coordinator import PowerfoxDataUpdateCoordinator from .entity import PowerfoxEntity -T = TypeVar("T", PowerMeter, WaterMeter) - @dataclass(frozen=True, kw_only=True) -class PowerfoxSensorEntityDescription(Generic[T], SensorEntityDescription): +class PowerfoxSensorEntityDescription[T: (PowerMeter, WaterMeter)]( + SensorEntityDescription +): """Describes Poweropti sensor entity.""" value_fn: Callable[[T], float | int | None] diff --git a/homeassistant/components/powerwall/sensor.py b/homeassistant/components/powerwall/sensor.py index 9423d65b0fc..28506e2a60c 100644 --- a/homeassistant/components/powerwall/sensor.py +++ b/homeassistant/components/powerwall/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass from operator import attrgetter, methodcaller -from typing import TYPE_CHECKING, Generic, TypeVar +from typing import TYPE_CHECKING from tesla_powerwall import GridState, MeterResponse, MeterType @@ -35,14 +35,12 @@ from .models import BatteryResponse, PowerwallConfigEntry, PowerwallRuntimeData _METER_DIRECTION_EXPORT = "export" _METER_DIRECTION_IMPORT = "import" -_ValueParamT = TypeVar("_ValueParamT") -_ValueT = TypeVar("_ValueT", bound=float | int | str | None) +type _ValueType = float | int | str | None @dataclass(frozen=True, kw_only=True) -class PowerwallSensorEntityDescription( - SensorEntityDescription, - Generic[_ValueParamT, _ValueT], +class PowerwallSensorEntityDescription[_ValueParamT, _ValueT: _ValueType]( + SensorEntityDescription ): """Describes Powerwall entity.""" @@ -389,7 +387,7 @@ class PowerWallImportSensor(PowerWallEnergyDirectionSensor): return meter.get_energy_imported() -class PowerWallBatterySensor(BatteryEntity, SensorEntity, Generic[_ValueT]): +class PowerWallBatterySensor[_ValueT: _ValueType](BatteryEntity, SensorEntity): """Representation of an Powerwall Battery sensor.""" entity_description: 
PowerwallSensorEntityDescription[BatteryResponse, _ValueT] diff --git a/homeassistant/helpers/event.py b/homeassistant/helpers/event.py index 578132f358f..72a4ef3c050 100644 --- a/homeassistant/helpers/event.py +++ b/homeassistant/helpers/event.py @@ -90,7 +90,6 @@ RANDOM_MICROSECOND_MIN = 50000 RANDOM_MICROSECOND_MAX = 500000 _TypedDictT = TypeVar("_TypedDictT", bound=Mapping[str, Any]) -_StateEventDataT = TypeVar("_StateEventDataT", bound=EventStateEventData) @dataclass(slots=True, frozen=True) @@ -333,7 +332,7 @@ def async_track_state_change_event( @callback -def _async_dispatch_entity_id_event_soon( +def _async_dispatch_entity_id_event_soon[_StateEventDataT: EventStateEventData]( hass: HomeAssistant, callbacks: dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]], event: Event[_StateEventDataT], @@ -343,7 +342,7 @@ def _async_dispatch_entity_id_event_soon( @callback -def _async_dispatch_entity_id_event( +def _async_dispatch_entity_id_event[_StateEventDataT: EventStateEventData]( hass: HomeAssistant, callbacks: dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]], event: Event[_StateEventDataT], @@ -363,7 +362,7 @@ def _async_dispatch_entity_id_event( @callback -def _async_state_filter( +def _async_state_filter[_StateEventDataT: EventStateEventData]( hass: HomeAssistant, callbacks: dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]], event_data: _StateEventDataT, From 33c799b2d074bbc8feb3417315fb27ea5b6ee88f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 16:42:10 +0100 Subject: [PATCH 128/677] Migrate mqtt light tests to use Kelvin (#133035) --- tests/components/mqtt/test_light_json.py | 6 +++--- tests/components/mqtt/test_light_template.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index 7d8ff241d3c..18627c4f6ef 100644 --- a/tests/components/mqtt/test_light_json.py +++ 
b/tests/components/mqtt/test_light_json.py @@ -435,7 +435,7 @@ async def test_single_color_mode( assert state.state == STATE_ON assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes - assert state.attributes.get(light.ATTR_COLOR_TEMP) == 192 + assert state.attributes.get(light.ATTR_COLOR_TEMP_KELVIN) == 5208 assert state.attributes.get(light.ATTR_BRIGHTNESS) == 50 assert state.attributes.get(light.ATTR_COLOR_MODE) == color_modes[0] @@ -494,7 +494,7 @@ async def test_controlling_state_with_unknown_color_mode( ) state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get(light.ATTR_COLOR_TEMP) is None + assert state.attributes.get(light.ATTR_COLOR_TEMP_KELVIN) is None assert state.attributes.get(light.ATTR_BRIGHTNESS) is None assert state.attributes.get(light.ATTR_COLOR_MODE) == light.ColorMode.UNKNOWN @@ -507,7 +507,7 @@ async def test_controlling_state_with_unknown_color_mode( state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get(light.ATTR_COLOR_TEMP) == 192 + assert state.attributes.get(light.ATTR_COLOR_TEMP_KELVIN) == 5208 assert state.attributes.get(light.ATTR_BRIGHTNESS) == 50 assert state.attributes.get(light.ATTR_COLOR_MODE) == light.ColorMode.COLOR_TEMP diff --git a/tests/components/mqtt/test_light_template.py b/tests/components/mqtt/test_light_template.py index 64cdff370be..b17637e43b0 100644 --- a/tests/components/mqtt/test_light_template.py +++ b/tests/components/mqtt/test_light_template.py @@ -212,7 +212,7 @@ async def test_single_color_mode( assert state.state == STATE_ON assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes - assert state.attributes.get(light.ATTR_COLOR_TEMP) == 192 + assert state.attributes.get(light.ATTR_COLOR_TEMP_KELVIN) == 5208 assert state.attributes.get(light.ATTR_BRIGHTNESS) == 50 assert state.attributes.get(light.ATTR_COLOR_MODE) == color_modes[0] From 2ce2765e674fe6ebc0f8d9abadda5ccc14e583a2 Mon 
Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 16:49:25 +0100 Subject: [PATCH 129/677] Adjust light test helpers to use Kelvin, and cleanup unused helpers (#133048) Cleanup light test helper methods --- .core_files.yaml | 1 + tests/components/light/common.py | 107 +------------------ tests/components/mqtt/test_light.py | 4 +- tests/components/mqtt/test_light_json.py | 12 ++- tests/components/mqtt/test_light_template.py | 8 +- tests/components/tasmota/test_light.py | 10 +- 6 files changed, 28 insertions(+), 114 deletions(-) diff --git a/.core_files.yaml b/.core_files.yaml index cc99487f68d..2624c4432be 100644 --- a/.core_files.yaml +++ b/.core_files.yaml @@ -132,6 +132,7 @@ tests: &tests - tests/components/conftest.py - tests/components/diagnostics/** - tests/components/history/** + - tests/components/light/common.py - tests/components/logbook/** - tests/components/recorder/** - tests/components/repairs/** diff --git a/tests/components/light/common.py b/tests/components/light/common.py index 147f2336876..d696c7ab8cf 100644 --- a/tests/components/light/common.py +++ b/tests/components/light/common.py @@ -10,11 +10,10 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_BRIGHTNESS_PCT, ATTR_COLOR_NAME, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, - ATTR_KELVIN, ATTR_PROFILE, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, @@ -35,54 +34,10 @@ from homeassistant.const import ( SERVICE_TURN_ON, ) from homeassistant.core import HomeAssistant -from homeassistant.loader import bind_hass from tests.common import MockToggleEntity -@bind_hass -def turn_on( - hass: HomeAssistant, - entity_id: str = ENTITY_MATCH_ALL, - transition: float | None = None, - brightness: int | None = None, - brightness_pct: float | None = None, - rgb_color: tuple[int, int, int] | None = None, - rgbw_color: tuple[int, int, int, int] | None = None, - rgbww_color: tuple[int, int, int, int, int] | None = None, - 
xy_color: tuple[float, float] | None = None, - hs_color: tuple[float, float] | None = None, - color_temp: int | None = None, - kelvin: int | None = None, - profile: str | None = None, - flash: str | None = None, - effect: str | None = None, - color_name: str | None = None, - white: bool | None = None, -) -> None: - """Turn all or specified light on.""" - hass.add_job( - async_turn_on, - hass, - entity_id, - transition, - brightness, - brightness_pct, - rgb_color, - rgbw_color, - rgbww_color, - xy_color, - hs_color, - color_temp, - kelvin, - profile, - flash, - effect, - color_name, - white, - ) - - async def async_turn_on( hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL, @@ -94,8 +49,7 @@ async def async_turn_on( rgbww_color: tuple[int, int, int, int, int] | None = None, xy_color: tuple[float, float] | None = None, hs_color: tuple[float, float] | None = None, - color_temp: int | None = None, - kelvin: int | None = None, + color_temp_kelvin: int | None = None, profile: str | None = None, flash: str | None = None, effect: str | None = None, @@ -116,8 +70,7 @@ async def async_turn_on( (ATTR_RGBWW_COLOR, rgbww_color), (ATTR_XY_COLOR, xy_color), (ATTR_HS_COLOR, hs_color), - (ATTR_COLOR_TEMP, color_temp), - (ATTR_KELVIN, kelvin), + (ATTR_COLOR_TEMP_KELVIN, color_temp_kelvin), (ATTR_FLASH, flash), (ATTR_EFFECT, effect), (ATTR_COLOR_NAME, color_name), @@ -129,17 +82,6 @@ async def async_turn_on( await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True) -@bind_hass -def turn_off( - hass: HomeAssistant, - entity_id: str = ENTITY_MATCH_ALL, - transition: float | None = None, - flash: str | None = None, -) -> None: - """Turn all or specified light off.""" - hass.add_job(async_turn_off, hass, entity_id, transition, flash) - - async def async_turn_off( hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL, @@ -160,43 +102,6 @@ async def async_turn_off( await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True) -@bind_hass -def 
toggle( - hass: HomeAssistant, - entity_id: str = ENTITY_MATCH_ALL, - transition: float | None = None, - brightness: int | None = None, - brightness_pct: float | None = None, - rgb_color: tuple[int, int, int] | None = None, - xy_color: tuple[float, float] | None = None, - hs_color: tuple[float, float] | None = None, - color_temp: int | None = None, - kelvin: int | None = None, - profile: str | None = None, - flash: str | None = None, - effect: str | None = None, - color_name: str | None = None, -) -> None: - """Toggle all or specified light.""" - hass.add_job( - async_toggle, - hass, - entity_id, - transition, - brightness, - brightness_pct, - rgb_color, - xy_color, - hs_color, - color_temp, - kelvin, - profile, - flash, - effect, - color_name, - ) - - async def async_toggle( hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL, @@ -206,8 +111,7 @@ async def async_toggle( rgb_color: tuple[int, int, int] | None = None, xy_color: tuple[float, float] | None = None, hs_color: tuple[float, float] | None = None, - color_temp: int | None = None, - kelvin: int | None = None, + color_temp_kelvin: int | None = None, profile: str | None = None, flash: str | None = None, effect: str | None = None, @@ -225,8 +129,7 @@ async def async_toggle( (ATTR_RGB_COLOR, rgb_color), (ATTR_XY_COLOR, xy_color), (ATTR_HS_COLOR, hs_color), - (ATTR_COLOR_TEMP, color_temp), - (ATTR_KELVIN, kelvin), + (ATTR_COLOR_TEMP_KELVIN, color_temp_kelvin), (ATTR_FLASH, flash), (ATTR_EFFECT, effect), (ATTR_COLOR_NAME, color_name), diff --git a/tests/components/mqtt/test_light.py b/tests/components/mqtt/test_light.py index 8e9e2abb85a..ed4b16e3d0c 100644 --- a/tests/components/mqtt/test_light.py +++ b/tests/components/mqtt/test_light.py @@ -1148,7 +1148,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get(light.ATTR_COLOR_MODE) == "xy" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes - await common.async_turn_on(hass, "light.test", color_temp=125) 
+ await common.async_turn_on(hass, "light.test", color_temp_kelvin=8000) mqtt_mock.async_publish.assert_has_calls( [ call("test_light_rgb/color_temp/set", "125", 2, False), @@ -1321,7 +1321,7 @@ async def test_sending_mqtt_color_temp_command_with_template( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN - await common.async_turn_on(hass, "light.test", color_temp=100) + await common.async_turn_on(hass, "light.test", color_temp_kelvin=10000) mqtt_mock.async_publish.assert_has_calls( [ diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index 18627c4f6ef..b1031bec342 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -423,7 +423,9 @@ async def test_single_color_mode( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN - await common.async_turn_on(hass, "light.test", brightness=50, color_temp=192) + await common.async_turn_on( + hass, "light.test", brightness=50, color_temp_kelvin=5208 + ) async_fire_mqtt_message( hass, @@ -458,7 +460,9 @@ async def test_turn_on_with_unknown_color_mode_optimistic( assert state.state == STATE_ON # Turn on the light with brightness or color_temp attributes - await common.async_turn_on(hass, "light.test", brightness=50, color_temp=192) + await common.async_turn_on( + hass, "light.test", brightness=50, color_temp_kelvin=5208 + ) state = hass.states.get("light.test") assert state.attributes.get("color_mode") == light.ColorMode.COLOR_TEMP assert state.attributes.get("brightness") == 50 @@ -1083,7 +1087,7 @@ async def test_sending_mqtt_commands_and_optimistic( state = hass.states.get("light.test") assert state.state == STATE_ON - await common.async_turn_on(hass, "light.test", color_temp=90) + await common.async_turn_on(hass, "light.test", color_temp_kelvin=11111) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", @@ -1244,7 +1248,7 @@ async def 
test_sending_mqtt_commands_and_optimistic2( assert state.state == STATE_ON # Turn the light on with color temperature - await common.async_turn_on(hass, "light.test", color_temp=90) + await common.async_turn_on(hass, "light.test", color_temp_kelvin=11111) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", JsonValidator('{"state":"ON","color_temp":90}'), diff --git a/tests/components/mqtt/test_light_template.py b/tests/components/mqtt/test_light_template.py index b17637e43b0..5ffff578b5b 100644 --- a/tests/components/mqtt/test_light_template.py +++ b/tests/components/mqtt/test_light_template.py @@ -205,7 +205,9 @@ async def test_single_color_mode( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN - await common.async_turn_on(hass, "light.test", brightness=50, color_temp=192) + await common.async_turn_on( + hass, "light.test", brightness=50, color_temp_kelvin=5208 + ) async_fire_mqtt_message(hass, "test_light", "on,50,192") color_modes = [light.ColorMode.COLOR_TEMP] state = hass.states.get("light.test") @@ -463,7 +465,7 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.state == STATE_ON # Set color_temp - await common.async_turn_on(hass, "light.test", color_temp=70) + await common.async_turn_on(hass, "light.test", color_temp_kelvin=14285) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,70,--,-", 2, False ) @@ -594,7 +596,7 @@ async def test_sending_mqtt_commands_non_optimistic_brightness_template( assert state.state == STATE_UNKNOWN # Set color_temp - await common.async_turn_on(hass, "light.test", color_temp=70) + await common.async_turn_on(hass, "light.test", color_temp_kelvin=14285) mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", "on,,70,--,-", 0, False ) diff --git a/tests/components/tasmota/test_light.py b/tests/components/tasmota/test_light.py index f5802c509bf..4f4daee1301 100644 --- a/tests/components/tasmota/test_light.py +++ 
b/tests/components/tasmota/test_light.py @@ -1108,7 +1108,7 @@ async def test_sending_mqtt_commands_rgbww( ) mqtt_mock.async_publish.reset_mock() - await common.async_turn_on(hass, "light.tasmota_test", color_temp=200) + await common.async_turn_on(hass, "light.tasmota_test", color_temp_kelvin=5000) mqtt_mock.async_publish.assert_called_once_with( "tasmota_49A3BC/cmnd/Backlog", "NoDelay;Power1 ON;NoDelay;CT 200", @@ -1350,7 +1350,9 @@ async def test_transition( assert state.attributes.get("color_temp") == 153 # Set color_temp of the light from 153 to 500 @ 50%: Speed should be 6*2*2=24 - await common.async_turn_on(hass, "light.tasmota_test", color_temp=500, transition=6) + await common.async_turn_on( + hass, "light.tasmota_test", color_temp_kelvin=2000, transition=6 + ) mqtt_mock.async_publish.assert_called_once_with( "tasmota_49A3BC/cmnd/Backlog", "NoDelay;Fade2 1;NoDelay;Speed2 24;NoDelay;Power1 ON;NoDelay;CT 500", @@ -1369,7 +1371,9 @@ async def test_transition( assert state.attributes.get("color_temp") == 500 # Set color_temp of the light from 500 to 326 @ 50%: Speed should be 6*2*2*2=48->40 - await common.async_turn_on(hass, "light.tasmota_test", color_temp=326, transition=6) + await common.async_turn_on( + hass, "light.tasmota_test", color_temp_kelvin=3067, transition=6 + ) mqtt_mock.async_publish.assert_called_once_with( "tasmota_49A3BC/cmnd/Backlog", "NoDelay;Fade2 1;NoDelay;Speed2 40;NoDelay;Power1 ON;NoDelay;CT 326", From 0b18e51a13ef5e3f3fd24a9ab9df8f8cfd82b10e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 16:49:50 +0100 Subject: [PATCH 130/677] Remove reference to self.min/max_mireds in mqtt light (#133055) --- homeassistant/components/mqtt/light/schema_basic.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/homeassistant/components/mqtt/light/schema_basic.py b/homeassistant/components/mqtt/light/schema_basic.py index a4d3ecb5f21..9cc50daa329 100644 --- 
a/homeassistant/components/mqtt/light/schema_basic.py +++ b/homeassistant/components/mqtt/light/schema_basic.py @@ -486,10 +486,8 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): def _converter( r: int, g: int, b: int, cw: int, ww: int ) -> tuple[int, int, int]: - min_kelvin = color_util.color_temperature_mired_to_kelvin(self.max_mireds) - max_kelvin = color_util.color_temperature_mired_to_kelvin(self.min_mireds) return color_util.color_rgbww_to_rgb( - r, g, b, cw, ww, min_kelvin, max_kelvin + r, g, b, cw, ww, self.min_color_temp_kelvin, self.max_color_temp_kelvin ) rgbww = self._rgbx_received( From 3d201690ce460f5cb9fa31adca6477ac63bbeb44 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 12 Dec 2024 16:54:21 +0100 Subject: [PATCH 131/677] Fix load of backup store (#133024) * Fix load of backup store * Tweak type annotations in test * Fix tests * Remove the new test * Remove snapshots --- homeassistant/components/backup/config.py | 32 ++++++++++++--- tests/components/backup/conftest.py | 20 +++++++++- tests/components/backup/test_websocket.py | 47 ++++------------------- 3 files changed, 53 insertions(+), 46 deletions(-) diff --git a/homeassistant/components/backup/config.py b/homeassistant/components/backup/config.py index 6304d0aa90b..32dfa95509c 100644 --- a/homeassistant/components/backup/config.py +++ b/homeassistant/components/backup/config.py @@ -33,8 +33,8 @@ class StoredBackupConfig(TypedDict): """Represent the stored backup config.""" create_backup: StoredCreateBackupConfig - last_attempted_strategy_backup: datetime | None - last_completed_strategy_backup: datetime | None + last_attempted_strategy_backup: str | None + last_completed_strategy_backup: str | None retention: StoredRetentionConfig schedule: StoredBackupSchedule @@ -59,6 +59,16 @@ class BackupConfigData: include_folders = None retention = data["retention"] + if last_attempted_str := data["last_attempted_strategy_backup"]: + last_attempted = 
dt_util.parse_datetime(last_attempted_str) + else: + last_attempted = None + + if last_attempted_str := data["last_completed_strategy_backup"]: + last_completed = dt_util.parse_datetime(last_attempted_str) + else: + last_completed = None + return cls( create_backup=CreateBackupConfig( agent_ids=data["create_backup"]["agent_ids"], @@ -69,8 +79,8 @@ class BackupConfigData: name=data["create_backup"]["name"], password=data["create_backup"]["password"], ), - last_attempted_strategy_backup=data["last_attempted_strategy_backup"], - last_completed_strategy_backup=data["last_completed_strategy_backup"], + last_attempted_strategy_backup=last_attempted, + last_completed_strategy_backup=last_completed, retention=RetentionConfig( copies=retention["copies"], days=retention["days"], @@ -80,10 +90,20 @@ class BackupConfigData: def to_dict(self) -> StoredBackupConfig: """Convert backup config data to a dict.""" + if self.last_attempted_strategy_backup: + last_attempted = self.last_attempted_strategy_backup.isoformat() + else: + last_attempted = None + + if self.last_completed_strategy_backup: + last_completed = self.last_completed_strategy_backup.isoformat() + else: + last_completed = None + return StoredBackupConfig( create_backup=self.create_backup.to_dict(), - last_attempted_strategy_backup=self.last_attempted_strategy_backup, - last_completed_strategy_backup=self.last_completed_strategy_backup, + last_attempted_strategy_backup=last_attempted, + last_completed_strategy_backup=last_completed, retention=self.retention.to_dict(), schedule=self.schedule.to_dict(), ) diff --git a/tests/components/backup/conftest.py b/tests/components/backup/conftest.py index 7ccfcc4e0f0..13f2537db47 100644 --- a/tests/components/backup/conftest.py +++ b/tests/components/backup/conftest.py @@ -2,12 +2,14 @@ from __future__ import annotations +from asyncio import Future from collections.abc import Generator from pathlib import Path -from unittest.mock import MagicMock, Mock, patch +from unittest.mock 
import AsyncMock, MagicMock, Mock, patch import pytest +from homeassistant.components.backup.manager import WrittenBackup from homeassistant.core import HomeAssistant from .common import TEST_BACKUP_PATH_ABC123 @@ -62,6 +64,22 @@ CONFIG_DIR = { CONFIG_DIR_DIRS = {Path(".storage"), Path("backups"), Path("tmp_backups")} +@pytest.fixture(name="create_backup") +def mock_create_backup() -> Generator[AsyncMock]: + """Mock manager create backup.""" + mock_written_backup = MagicMock(spec_set=WrittenBackup) + mock_written_backup.backup.backup_id = "abc123" + mock_written_backup.open_stream = AsyncMock() + mock_written_backup.release_stream = AsyncMock() + fut = Future() + fut.set_result(mock_written_backup) + with patch( + "homeassistant.components.backup.CoreBackupReaderWriter.async_create_backup" + ) as mock_create_backup: + mock_create_backup.return_value = (MagicMock(), fut) + yield mock_create_backup + + @pytest.fixture(name="mock_backup_generation") def mock_backup_generation_fixture( hass: HomeAssistant, mocked_json_bytes: Mock, mocked_tarfile: Mock diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index 9df93ee9c46..518005e8470 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -1,8 +1,6 @@ """Tests for the Backup integration.""" -from asyncio import Future from collections.abc import Generator -from datetime import datetime from typing import Any from unittest.mock import ANY, AsyncMock, MagicMock, call, patch @@ -17,7 +15,6 @@ from homeassistant.components.backup.manager import ( CreateBackupEvent, CreateBackupState, NewBackup, - WrittenBackup, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -89,22 +86,6 @@ def mock_delay_save() -> Generator[None]: yield -@pytest.fixture(name="create_backup") -def mock_create_backup() -> Generator[AsyncMock]: - """Mock manager create backup.""" - mock_written_backup = 
MagicMock(spec_set=WrittenBackup) - mock_written_backup.backup.backup_id = "abc123" - mock_written_backup.open_stream = AsyncMock() - mock_written_backup.release_stream = AsyncMock() - fut = Future() - fut.set_result(mock_written_backup) - with patch( - "homeassistant.components.backup.CoreBackupReaderWriter.async_create_backup" - ) as mock_create_backup: - mock_create_backup.return_value = (MagicMock(), fut) - yield mock_create_backup - - @pytest.fixture(name="delete_backup") def mock_delete_backup() -> Generator[AsyncMock]: """Mock manager delete backup.""" @@ -798,12 +779,8 @@ async def test_agents_info( "password": "test-password", }, "retention": {"copies": 3, "days": 7}, - "last_attempted_strategy_backup": datetime.fromisoformat( - "2024-10-26T04:45:00+01:00" - ), - "last_completed_strategy_backup": datetime.fromisoformat( - "2024-10-26T04:45:00+01:00" - ), + "last_attempted_strategy_backup": "2024-10-26T04:45:00+01:00", + "last_completed_strategy_backup": "2024-10-26T04:45:00+01:00", "schedule": {"state": "daily"}, }, }, @@ -838,12 +815,8 @@ async def test_agents_info( "password": None, }, "retention": {"copies": None, "days": 7}, - "last_attempted_strategy_backup": datetime.fromisoformat( - "2024-10-27T04:45:00+01:00" - ), - "last_completed_strategy_backup": datetime.fromisoformat( - "2024-10-26T04:45:00+01:00" - ), + "last_attempted_strategy_backup": "2024-10-27T04:45:00+01:00", + "last_completed_strategy_backup": "2024-10-26T04:45:00+01:00", "schedule": {"state": "never"}, }, }, @@ -1205,12 +1178,8 @@ async def test_config_schedule_logic( "password": "test-password", }, "retention": {"copies": None, "days": None}, - "last_attempted_strategy_backup": datetime.fromisoformat( - last_completed_strategy_backup - ), - "last_completed_strategy_backup": datetime.fromisoformat( - last_completed_strategy_backup - ), + "last_attempted_strategy_backup": last_completed_strategy_backup, + "last_completed_strategy_backup": last_completed_strategy_backup, "schedule": 
{"state": "daily"}, }, } @@ -1486,7 +1455,7 @@ async def test_config_retention_copies_logic( }, "retention": {"copies": None, "days": None}, "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": datetime.fromisoformat(last_backup_time), + "last_completed_strategy_backup": last_backup_time, "schedule": {"state": "daily"}, }, } @@ -1699,7 +1668,7 @@ async def test_config_retention_days_logic( }, "retention": {"copies": None, "days": None}, "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": datetime.fromisoformat(last_backup_time), + "last_completed_strategy_backup": last_backup_time, "schedule": {"state": "never"}, }, } From 0726809228789d3b1846f080dd0e10dd747ca60c Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Thu, 12 Dec 2024 17:00:11 +0100 Subject: [PATCH 132/677] Bump velbusaio to 2024.12.1 (#133056) --- homeassistant/components/velbus/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/velbus/manifest.json b/homeassistant/components/velbus/manifest.json index 5725a10b6f6..600370f87d9 100644 --- a/homeassistant/components/velbus/manifest.json +++ b/homeassistant/components/velbus/manifest.json @@ -13,7 +13,7 @@ "velbus-packet", "velbus-protocol" ], - "requirements": ["velbus-aio==2024.12.0"], + "requirements": ["velbus-aio==2024.12.1"], "usb": [ { "vid": "10CF", diff --git a/requirements_all.txt b/requirements_all.txt index fb873805873..ee253d174df 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2939,7 +2939,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.12.0 +velbus-aio==2024.12.1 # homeassistant.components.venstar venstarcolortouch==0.19 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 83e7c89dd8b..65290d4b308 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2349,7 +2349,7 @@ 
vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.12.0 +velbus-aio==2024.12.1 # homeassistant.components.venstar venstarcolortouch==0.19 From e7a43cfe090c0ccce30342c2479c6d81f5f91541 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 18:13:24 +0100 Subject: [PATCH 133/677] Migrate deconz light tests to use Kelvin (#133002) --- tests/components/deconz/test_light.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/components/deconz/test_light.py b/tests/components/deconz/test_light.py index 15135a333ce..9ac15d4867b 100644 --- a/tests/components/deconz/test_light.py +++ b/tests/components/deconz/test_light.py @@ -11,7 +11,7 @@ from homeassistant.components.deconz.const import CONF_ALLOW_DECONZ_GROUPS from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, @@ -391,7 +391,7 @@ async def test_light_state_change( "call": { ATTR_ENTITY_ID: "light.hue_go", ATTR_BRIGHTNESS: 200, - ATTR_COLOR_TEMP: 200, + ATTR_COLOR_TEMP_KELVIN: 5000, ATTR_TRANSITION: 5, ATTR_FLASH: FLASH_SHORT, ATTR_EFFECT: EFFECT_COLORLOOP, @@ -804,7 +804,7 @@ async def test_groups( "call": { ATTR_ENTITY_ID: "light.group", ATTR_BRIGHTNESS: 200, - ATTR_COLOR_TEMP: 200, + ATTR_COLOR_TEMP_KELVIN: 5000, ATTR_TRANSITION: 5, ATTR_FLASH: FLASH_SHORT, ATTR_EFFECT: EFFECT_COLORLOOP, @@ -1079,7 +1079,7 @@ async def test_non_color_light_reports_color( hass.states.get("light.group").attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP ) - assert hass.states.get("light.group").attributes[ATTR_COLOR_TEMP] == 250 + assert hass.states.get("light.group").attributes[ATTR_COLOR_TEMP_KELVIN] == 4000 # Updating a scene will return a faulty color value # for a non-color light causing an exception in hs_color @@ -1099,7 +1099,7 @@ async def test_non_color_light_reports_color( group = 
hass.states.get("light.group") assert group.attributes[ATTR_COLOR_MODE] == ColorMode.XY assert group.attributes[ATTR_HS_COLOR] == (40.571, 41.176) - assert group.attributes.get(ATTR_COLOR_TEMP) is None + assert group.attributes.get(ATTR_COLOR_TEMP_KELVIN) is None @pytest.mark.parametrize( From 39e4719a43051d364d13195e49452c1fcf5612a5 Mon Sep 17 00:00:00 2001 From: Martin Hjelmare Date: Thu, 12 Dec 2024 18:47:37 +0100 Subject: [PATCH 134/677] Fix backup strategy retention filter (#133060) * Fix lint * Update tests * Fix backup strategy retention filter --- homeassistant/components/backup/config.py | 9 +- tests/components/backup/test_websocket.py | 307 +++++++++++++++++++--- 2 files changed, 275 insertions(+), 41 deletions(-) diff --git a/homeassistant/components/backup/config.py b/homeassistant/components/backup/config.py index 32dfa95509c..26ce691a4cc 100644 --- a/homeassistant/components/backup/config.py +++ b/homeassistant/components/backup/config.py @@ -423,7 +423,14 @@ async def _delete_filtered_backups( get_agent_errors, ) - LOGGER.debug("Total backups: %s", backups) + # only delete backups that are created by the backup strategy + backups = { + backup_id: backup + for backup_id, backup in backups.items() + if backup.with_strategy_settings + } + + LOGGER.debug("Total strategy backups: %s", backups) filtered_backups = backup_filter(backups) diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index 518005e8470..4a94689c19e 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -14,6 +14,7 @@ from homeassistant.components.backup.const import DATA_MANAGER, DOMAIN from homeassistant.components.backup.manager import ( CreateBackupEvent, CreateBackupState, + ManagerBackup, NewBackup, ) from homeassistant.core import HomeAssistant @@ -42,7 +43,7 @@ BACKUP_CALL = call( on_progress=ANY, ) -DEFAULT_STORAGE_DATA = { +DEFAULT_STORAGE_DATA: dict[str, Any] = { "backups": {}, 
"config": { "create_backup": { @@ -1248,9 +1249,26 @@ async def test_config_schedule_logic( "schedule": "daily", }, { - "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), - "backup-3": MagicMock(date="2024-11-12T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, @@ -1270,9 +1288,26 @@ async def test_config_schedule_logic( "schedule": "daily", }, { - "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), - "backup-3": MagicMock(date="2024-11-12T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, @@ -1292,10 +1327,31 @@ async def test_config_schedule_logic( "schedule": "daily", }, { - "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), - "backup-4": MagicMock(date="2024-11-12T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-09T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": 
MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-5": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, @@ -1315,10 +1371,31 @@ async def test_config_schedule_logic( "schedule": "daily", }, { - "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), - "backup-4": MagicMock(date="2024-11-12T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-09T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-5": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, @@ -1338,9 +1415,26 @@ async def test_config_schedule_logic( "schedule": "daily", }, { - "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), - "backup-3": MagicMock(date="2024-11-12T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, 
+ spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {"test-agent": BackupAgentError("Boom!")}, {}, @@ -1360,9 +1454,26 @@ async def test_config_schedule_logic( "schedule": "daily", }, { - "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), - "backup-3": MagicMock(date="2024-11-12T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {"test-agent": BackupAgentError("Boom!")}, @@ -1382,10 +1493,31 @@ async def test_config_schedule_logic( "schedule": "daily", }, { - "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), - "backup-4": MagicMock(date="2024-11-12T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-09T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-5": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, @@ -1405,7 +1537,16 @@ async def test_config_schedule_logic( "schedule": 
"daily", }, { - "backup-1": MagicMock(date="2024-11-12T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, @@ -1518,8 +1659,21 @@ async def test_config_retention_copies_logic( "schedule": "never", }, { - "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, @@ -1538,8 +1692,21 @@ async def test_config_retention_copies_logic( "schedule": "never", }, { - "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, @@ -1558,9 +1725,26 @@ async def test_config_retention_copies_logic( "schedule": "never", }, { - "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-09T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + 
"backup-3": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, @@ -1579,8 +1763,21 @@ async def test_config_retention_copies_logic( "schedule": "never", }, { - "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {"test-agent": BackupAgentError("Boom!")}, {}, @@ -1599,8 +1796,21 @@ async def test_config_retention_copies_logic( "schedule": "never", }, { - "backup-1": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {"test-agent": BackupAgentError("Boom!")}, @@ -1619,9 +1829,26 @@ async def test_config_retention_copies_logic( "schedule": "never", }, { - "backup-1": MagicMock(date="2024-11-09T04:45:00+01:00"), - "backup-2": MagicMock(date="2024-11-10T04:45:00+01:00"), - "backup-3": MagicMock(date="2024-11-11T04:45:00+01:00"), + "backup-1": MagicMock( + date="2024-11-09T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), 
+ "backup-3": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), }, {}, {}, From a6b785d937157009e339f6c6fc03dcac2e7891dc Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Thu, 12 Dec 2024 19:11:07 +0100 Subject: [PATCH 135/677] Update frontend to 20241127.8 (#133066) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index bfc08c6e11e..1f9988dff38 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241127.7"] + "requirements": ["home-assistant-frontend==20241127.8"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index b2dd0cf251c..65a6890024f 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -34,7 +34,7 @@ habluetooth==3.6.0 hass-nabucasa==0.87.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241127.7 +home-assistant-frontend==20241127.8 home-assistant-intents==2024.12.9 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index ee253d174df..e866ba901cc 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1128,7 +1128,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.7 +home-assistant-frontend==20241127.8 # homeassistant.components.conversation 
home-assistant-intents==2024.12.9 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 65290d4b308..b93673f45bd 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -954,7 +954,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.7 +home-assistant-frontend==20241127.8 # homeassistant.components.conversation home-assistant-intents==2024.12.9 From 12051787027352e13ea7a2835d590a88230bc31f Mon Sep 17 00:00:00 2001 From: Andrew Sayre <6730289+andrewsayre@users.noreply.github.com> Date: Thu, 12 Dec 2024 12:32:00 -0600 Subject: [PATCH 136/677] Add HEOS quality scale (#132311) --- .../components/heos/quality_scale.yaml | 114 ++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 114 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/heos/quality_scale.yaml diff --git a/homeassistant/components/heos/quality_scale.yaml b/homeassistant/components/heos/quality_scale.yaml new file mode 100644 index 00000000000..ed9939bf37c --- /dev/null +++ b/homeassistant/components/heos/quality_scale.yaml @@ -0,0 +1,114 @@ +rules: + # Bronze + action-setup: + status: todo + comment: Future enhancement to move custom actions for login/out into an options flow. + appropriate-polling: + status: done + comment: Integration is a local push integration + brands: done + common-modules: todo + config-flow-test-coverage: + status: todo + comment: + 1. The config flow is 100% covered, however some tests need to let HA create the flow + handler instead of doing it manually in the test. + 2. We should also make sure every test ends in either CREATE_ENTRY or ABORT so we test + that the flow is able to recover from an error. + config-flow: + status: todo + comment: | + 1. YAML import to be removed after core team meeting discussion on approach. + 2. Consider enhnacement to automatically select a host when multiple are discovered. + 3. 
Move hass.data[heos_discovered_hosts] into hass.data[heos] + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: todo + comment: | + Simplify by using async_on_remove instead of keeping track of listeners to remove + later in async_will_remove_from_hass. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: todo + test-before-setup: done + unique-config-entry: + status: todo + comment: | + The HEOS integration only supports a single config entry, but needs to be migrated to use + the `single_config_entry` flag. HEOS devices interconnect to each other, so connecting to + a single node yields access to all the devices setup with HEOS on your network. The HEOS API + documentation does not recommend connecting to multiple nodes which would provide no bennefit. + # Silver + action-exceptions: + status: todo + comment: Actions currently only log and instead should raise exceptions. + config-entry-unloading: done + docs-configuration-parameters: + status: done + comment: | + The integration doesn't provide any additional configuration parameters. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: + status: todo + comment: | + The integration currently spams the logs until reconnected + parallel-updates: + status: todo + comment: Needs to be set to 0. The underlying library handles parallel updates. + reauthentication-flow: + status: exempt + comment: | + This integration doesn't require re-authentication. + test-coverage: + status: todo + comment: | + 1. Integration has >95% coverage, however tests need to be updated to not patch internals. + 2. 
test_async_setup_entry_connect_failure and test_async_setup_entry_player_failure -> Instead of + calling async_setup_entry directly, rather use hass.config_entries.async_setup and then assert + the config_entry.state is what we expect. + 3. test_unload_entry -> We should use hass.config_entries.async_unload and assert the entry state + 4. Recommend using snapshot in test_state_attributes. + 5. Find a way to avoid using internal dispatcher in test_updates_from_connection_event. + # Gold + devices: + status: todo + comment: | + The integraiton creates devices, but needs to stringify the id for the device identifier and + also migrate the device. + diagnostics: todo + discovery-update-info: + status: todo + comment: Explore if this is possible. + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: done + docs-troubleshooting: + status: todo + comment: Has some troublehsooting setps, but needs to be improved + docs-use-cases: done + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: todo + repair-issues: todo + stale-devices: todo + # Platinum + async-dependency: done + inject-websession: + status: done + comment: The integration does not use websession + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 49f05b78a16..784573f5f8f 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -481,7 +481,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "hddtemp", "hdmi_cec", "heatmiser", - "heos", "here_travel_time", "hikvision", "hikvisioncam", From b8ce1b010f1d144fcea88f777eb6f93055e5e2ec Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 19:39:24 +0100 Subject: [PATCH 137/677] Update demetriek to v1.1.0 
(#133064) --- homeassistant/components/lametric/manifest.json | 2 +- homeassistant/components/lametric/number.py | 4 +++- homeassistant/components/lametric/switch.py | 9 +++++++-- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../components/lametric/snapshots/test_diagnostics.ambr | 1 + 6 files changed, 14 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/lametric/manifest.json b/homeassistant/components/lametric/manifest.json index b930192caf0..5a066d015f2 100644 --- a/homeassistant/components/lametric/manifest.json +++ b/homeassistant/components/lametric/manifest.json @@ -13,7 +13,7 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["demetriek"], - "requirements": ["demetriek==1.0.0"], + "requirements": ["demetriek==1.1.0"], "ssdp": [ { "deviceType": "urn:schemas-upnp-org:device:LaMetric:1" diff --git a/homeassistant/components/lametric/number.py b/homeassistant/components/lametric/number.py index cea9debb04b..1025e04a4a8 100644 --- a/homeassistant/components/lametric/number.py +++ b/homeassistant/components/lametric/number.py @@ -25,6 +25,7 @@ class LaMetricNumberEntityDescription(NumberEntityDescription): """Class describing LaMetric number entities.""" value_fn: Callable[[Device], int | None] + has_fn: Callable[[Device], bool] = lambda device: True set_value_fn: Callable[[LaMetricDevice, float], Awaitable[Any]] @@ -49,7 +50,8 @@ NUMBERS = [ native_step=1, native_min_value=0, native_max_value=100, - value_fn=lambda device: device.audio.volume, + has_fn=lambda device: bool(device.audio), + value_fn=lambda device: device.audio.volume if device.audio else 0, set_value_fn=lambda api, volume: api.audio(volume=int(volume)), ), ] diff --git a/homeassistant/components/lametric/switch.py b/homeassistant/components/lametric/switch.py index 9689bb7b802..3aabfaf17e1 100644 --- a/homeassistant/components/lametric/switch.py +++ b/homeassistant/components/lametric/switch.py @@ -25,6 +25,7 @@ class 
LaMetricSwitchEntityDescription(SwitchEntityDescription): """Class describing LaMetric switch entities.""" available_fn: Callable[[Device], bool] = lambda device: True + has_fn: Callable[[Device], bool] = lambda device: True is_on_fn: Callable[[Device], bool] set_fn: Callable[[LaMetricDevice, bool], Awaitable[Any]] @@ -34,8 +35,11 @@ SWITCHES = [ key="bluetooth", translation_key="bluetooth", entity_category=EntityCategory.CONFIG, - available_fn=lambda device: device.bluetooth.available, - is_on_fn=lambda device: device.bluetooth.active, + available_fn=lambda device: bool( + device.bluetooth and device.bluetooth.available + ), + has_fn=lambda device: bool(device.bluetooth), + is_on_fn=lambda device: bool(device.bluetooth and device.bluetooth.active), set_fn=lambda api, active: api.bluetooth(active=active), ), ] @@ -54,6 +58,7 @@ async def async_setup_entry( description=description, ) for description in SWITCHES + if description.has_fn(coordinator.data) ) diff --git a/requirements_all.txt b/requirements_all.txt index e866ba901cc..c361ffec5a8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -747,7 +747,7 @@ defusedxml==0.7.1 deluge-client==1.10.2 # homeassistant.components.lametric -demetriek==1.0.0 +demetriek==1.1.0 # homeassistant.components.denonavr denonavr==1.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b93673f45bd..1c918cb2f1c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -637,7 +637,7 @@ defusedxml==0.7.1 deluge-client==1.10.2 # homeassistant.components.lametric -demetriek==1.0.0 +demetriek==1.1.0 # homeassistant.components.denonavr denonavr==1.0.1 diff --git a/tests/components/lametric/snapshots/test_diagnostics.ambr b/tests/components/lametric/snapshots/test_diagnostics.ambr index 15b35576ad4..7517cfe035e 100644 --- a/tests/components/lametric/snapshots/test_diagnostics.ambr +++ b/tests/components/lametric/snapshots/test_diagnostics.ambr @@ -26,6 +26,7 @@ 'brightness_mode': 'auto', 
'display_type': 'mixed', 'height': 8, + 'on': None, 'screensaver': dict({ 'enabled': False, }), From 3c7502dd5da287992375056c27ef6eacd01b2523 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 19:46:35 +0100 Subject: [PATCH 138/677] Explicitly pass config entry to coordinator in Tailwind (#133065) --- homeassistant/components/tailwind/__init__.py | 3 +-- homeassistant/components/tailwind/binary_sensor.py | 2 +- homeassistant/components/tailwind/button.py | 2 +- homeassistant/components/tailwind/coordinator.py | 5 ++++- homeassistant/components/tailwind/cover.py | 2 +- homeassistant/components/tailwind/diagnostics.py | 2 +- homeassistant/components/tailwind/number.py | 2 +- homeassistant/components/tailwind/typing.py | 7 ------- 8 files changed, 10 insertions(+), 15 deletions(-) delete mode 100644 homeassistant/components/tailwind/typing.py diff --git a/homeassistant/components/tailwind/__init__.py b/homeassistant/components/tailwind/__init__.py index 6f1a234e94a..c48f5344763 100644 --- a/homeassistant/components/tailwind/__init__.py +++ b/homeassistant/components/tailwind/__init__.py @@ -8,8 +8,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from .const import DOMAIN -from .coordinator import TailwindDataUpdateCoordinator -from .typing import TailwindConfigEntry +from .coordinator import TailwindConfigEntry, TailwindDataUpdateCoordinator PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.COVER, Platform.NUMBER] diff --git a/homeassistant/components/tailwind/binary_sensor.py b/homeassistant/components/tailwind/binary_sensor.py index 0ce0b4bd964..d2f8e1e2ced 100644 --- a/homeassistant/components/tailwind/binary_sensor.py +++ b/homeassistant/components/tailwind/binary_sensor.py @@ -16,8 +16,8 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from .coordinator import 
TailwindConfigEntry from .entity import TailwindDoorEntity -from .typing import TailwindConfigEntry @dataclass(kw_only=True, frozen=True) diff --git a/homeassistant/components/tailwind/button.py b/homeassistant/components/tailwind/button.py index 2a675bbfdf7..edff3434866 100644 --- a/homeassistant/components/tailwind/button.py +++ b/homeassistant/components/tailwind/button.py @@ -19,8 +19,8 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN +from .coordinator import TailwindConfigEntry from .entity import TailwindEntity -from .typing import TailwindConfigEntry @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/tailwind/coordinator.py b/homeassistant/components/tailwind/coordinator.py index 4d1b4af74c9..770751ccc3b 100644 --- a/homeassistant/components/tailwind/coordinator.py +++ b/homeassistant/components/tailwind/coordinator.py @@ -18,11 +18,13 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda from .const import DOMAIN, LOGGER +type TailwindConfigEntry = ConfigEntry[TailwindDataUpdateCoordinator] + class TailwindDataUpdateCoordinator(DataUpdateCoordinator[TailwindDeviceStatus]): """Class to manage fetching Tailwind data.""" - def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: + def __init__(self, hass: HomeAssistant, entry: TailwindConfigEntry) -> None: """Initialize the coordinator.""" self.tailwind = Tailwind( host=entry.data[CONF_HOST], @@ -32,6 +34,7 @@ class TailwindDataUpdateCoordinator(DataUpdateCoordinator[TailwindDeviceStatus]) super().__init__( hass, LOGGER, + config_entry=entry, name=f"{DOMAIN}_{entry.data[CONF_HOST]}", update_interval=timedelta(seconds=5), ) diff --git a/homeassistant/components/tailwind/cover.py b/homeassistant/components/tailwind/cover.py index 116fb4a9e6c..8ea1c7d4f6d 100644 --- a/homeassistant/components/tailwind/cover.py +++ 
b/homeassistant/components/tailwind/cover.py @@ -23,8 +23,8 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN, LOGGER +from .coordinator import TailwindConfigEntry from .entity import TailwindDoorEntity -from .typing import TailwindConfigEntry async def async_setup_entry( diff --git a/homeassistant/components/tailwind/diagnostics.py b/homeassistant/components/tailwind/diagnostics.py index 5d681356647..b7a51b56775 100644 --- a/homeassistant/components/tailwind/diagnostics.py +++ b/homeassistant/components/tailwind/diagnostics.py @@ -6,7 +6,7 @@ from typing import Any from homeassistant.core import HomeAssistant -from .typing import TailwindConfigEntry +from .coordinator import TailwindConfigEntry async def async_get_config_entry_diagnostics( diff --git a/homeassistant/components/tailwind/number.py b/homeassistant/components/tailwind/number.py index 0ff1f444280..b67df9a6a25 100644 --- a/homeassistant/components/tailwind/number.py +++ b/homeassistant/components/tailwind/number.py @@ -15,8 +15,8 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN +from .coordinator import TailwindConfigEntry from .entity import TailwindEntity -from .typing import TailwindConfigEntry @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/tailwind/typing.py b/homeassistant/components/tailwind/typing.py deleted file mode 100644 index 514a94a8e78..00000000000 --- a/homeassistant/components/tailwind/typing.py +++ /dev/null @@ -1,7 +0,0 @@ -"""Typings for the Tailwind integration.""" - -from homeassistant.config_entries import ConfigEntry - -from .coordinator import TailwindDataUpdateCoordinator - -type TailwindConfigEntry = ConfigEntry[TailwindDataUpdateCoordinator] From 40c3dd2095167c48c1ffd4dbcc16796d21393af5 Mon Sep 17 00:00:00 2001 From: epenet 
<6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:08:07 +0100 Subject: [PATCH 139/677] Migrate group light tests to use Kelvin (#133010) --- tests/components/group/test_light.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tests/components/group/test_light.py b/tests/components/group/test_light.py index af8556b5450..91604d663b3 100644 --- a/tests/components/group/test_light.py +++ b/tests/components/group/test_light.py @@ -12,7 +12,6 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, ATTR_COLOR_NAME, - ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, @@ -792,19 +791,19 @@ async def test_emulated_color_temp_group(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.light_group", ATTR_COLOR_TEMP: 200}, + {ATTR_ENTITY_ID: "light.light_group", ATTR_COLOR_TEMP_KELVIN: 5000}, blocking=True, ) await hass.async_block_till_done() state = hass.states.get("light.test1") assert state.state == STATE_ON - assert state.attributes[ATTR_COLOR_TEMP] == 200 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 5000 assert ATTR_HS_COLOR in state.attributes state = hass.states.get("light.test2") assert state.state == STATE_ON - assert state.attributes[ATTR_COLOR_TEMP] == 200 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 5000 assert ATTR_HS_COLOR in state.attributes state = hass.states.get("light.test3") From ce70cb9e3370fbcba1ed79c7183ae4e279457477 Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Thu, 12 Dec 2024 20:13:41 +0100 Subject: [PATCH 140/677] Use ConfigEntry runtime_data in easyEnergy (#133053) --- .../components/easyenergy/__init__.py | 18 +++---- .../components/easyenergy/coordinator.py | 7 ++- .../components/easyenergy/diagnostics.py | 50 +++++++++---------- homeassistant/components/easyenergy/sensor.py | 13 +++-- .../components/easyenergy/services.py | 9 ++-- tests/components/easyenergy/test_init.py | 
2 - 6 files changed, 49 insertions(+), 50 deletions(-) diff --git a/homeassistant/components/easyenergy/__init__.py b/homeassistant/components/easyenergy/__init__.py index e520631158a..0548431f09d 100644 --- a/homeassistant/components/easyenergy/__init__.py +++ b/homeassistant/components/easyenergy/__init__.py @@ -2,7 +2,6 @@ from __future__ import annotations -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady @@ -10,10 +9,10 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType from .const import DOMAIN -from .coordinator import EasyEnergyDataUpdateCoordinator +from .coordinator import EasyEnergyConfigEntry, EasyEnergyDataUpdateCoordinator from .services import async_setup_services -PLATFORMS = [Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.SENSOR] CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) @@ -25,25 +24,22 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: EasyEnergyConfigEntry) -> bool: """Set up easyEnergy from a config entry.""" - coordinator = EasyEnergyDataUpdateCoordinator(hass) + coordinator = EasyEnergyDataUpdateCoordinator(hass, entry) try: await coordinator.async_config_entry_first_refresh() except ConfigEntryNotReady: await coordinator.easyenergy.close() raise - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: EasyEnergyConfigEntry) -> bool: """Unload easyEnergy config entry.""" - if 
unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/easyenergy/coordinator.py b/homeassistant/components/easyenergy/coordinator.py index 8c1c593af93..e36bdf188ee 100644 --- a/homeassistant/components/easyenergy/coordinator.py +++ b/homeassistant/components/easyenergy/coordinator.py @@ -21,6 +21,8 @@ from homeassistant.util import dt as dt_util from .const import DOMAIN, LOGGER, SCAN_INTERVAL, THRESHOLD_HOUR +type EasyEnergyConfigEntry = ConfigEntry[EasyEnergyDataUpdateCoordinator] + class EasyEnergyData(NamedTuple): """Class for defining data in dict.""" @@ -33,15 +35,16 @@ class EasyEnergyData(NamedTuple): class EasyEnergyDataUpdateCoordinator(DataUpdateCoordinator[EasyEnergyData]): """Class to manage fetching easyEnergy data from single endpoint.""" - config_entry: ConfigEntry + config_entry: EasyEnergyConfigEntry - def __init__(self, hass: HomeAssistant) -> None: + def __init__(self, hass: HomeAssistant, entry: EasyEnergyConfigEntry) -> None: """Initialize global easyEnergy data updater.""" super().__init__( hass, LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL, + config_entry=entry, ) self.easyenergy = EasyEnergy(session=async_get_clientsession(hass)) diff --git a/homeassistant/components/easyenergy/diagnostics.py b/homeassistant/components/easyenergy/diagnostics.py index d6912e1c926..64f30ba61fd 100644 --- a/homeassistant/components/easyenergy/diagnostics.py +++ b/homeassistant/components/easyenergy/diagnostics.py @@ -5,12 +5,9 @@ from __future__ import annotations from datetime import timedelta from typing import Any -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . 
import EasyEnergyDataUpdateCoordinator -from .const import DOMAIN -from .coordinator import EasyEnergyData +from .coordinator import EasyEnergyConfigEntry, EasyEnergyData def get_gas_price(data: EasyEnergyData, hours: int) -> float | None: @@ -32,41 +29,42 @@ def get_gas_price(data: EasyEnergyData, hours: int) -> float | None: async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: EasyEnergyConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: EasyEnergyDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator_data = entry.runtime_data.data + energy_today = coordinator_data.energy_today return { "entry": { "title": entry.title, }, "energy_usage": { - "current_hour_price": coordinator.data.energy_today.current_usage_price, - "next_hour_price": coordinator.data.energy_today.price_at_time( - coordinator.data.energy_today.utcnow() + timedelta(hours=1) + "current_hour_price": energy_today.current_usage_price, + "next_hour_price": energy_today.price_at_time( + energy_today.utcnow() + timedelta(hours=1) ), - "average_price": coordinator.data.energy_today.average_usage_price, - "max_price": coordinator.data.energy_today.extreme_usage_prices[1], - "min_price": coordinator.data.energy_today.extreme_usage_prices[0], - "highest_price_time": coordinator.data.energy_today.highest_usage_price_time, - "lowest_price_time": coordinator.data.energy_today.lowest_usage_price_time, - "percentage_of_max": coordinator.data.energy_today.pct_of_max_usage, + "average_price": energy_today.average_usage_price, + "max_price": energy_today.extreme_usage_prices[1], + "min_price": energy_today.extreme_usage_prices[0], + "highest_price_time": energy_today.highest_usage_price_time, + "lowest_price_time": energy_today.lowest_usage_price_time, + "percentage_of_max": energy_today.pct_of_max_usage, }, "energy_return": { - "current_hour_price": 
coordinator.data.energy_today.current_return_price, - "next_hour_price": coordinator.data.energy_today.price_at_time( - coordinator.data.energy_today.utcnow() + timedelta(hours=1), "return" + "current_hour_price": energy_today.current_return_price, + "next_hour_price": energy_today.price_at_time( + energy_today.utcnow() + timedelta(hours=1), "return" ), - "average_price": coordinator.data.energy_today.average_return_price, - "max_price": coordinator.data.energy_today.extreme_return_prices[1], - "min_price": coordinator.data.energy_today.extreme_return_prices[0], - "highest_price_time": coordinator.data.energy_today.highest_return_price_time, - "lowest_price_time": coordinator.data.energy_today.lowest_return_price_time, - "percentage_of_max": coordinator.data.energy_today.pct_of_max_return, + "average_price": energy_today.average_return_price, + "max_price": energy_today.extreme_return_prices[1], + "min_price": energy_today.extreme_return_prices[0], + "highest_price_time": energy_today.highest_return_price_time, + "lowest_price_time": energy_today.lowest_return_price_time, + "percentage_of_max": energy_today.pct_of_max_return, }, "gas": { - "current_hour_price": get_gas_price(coordinator.data, 0), - "next_hour_price": get_gas_price(coordinator.data, 1), + "current_hour_price": get_gas_price(coordinator_data, 0), + "next_hour_price": get_gas_price(coordinator_data, 1), }, } diff --git a/homeassistant/components/easyenergy/sensor.py b/homeassistant/components/easyenergy/sensor.py index 65fe2558d46..6976a38da49 100644 --- a/homeassistant/components/easyenergy/sensor.py +++ b/homeassistant/components/easyenergy/sensor.py @@ -13,7 +13,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CURRENCY_EURO, PERCENTAGE, @@ -27,7 +26,11 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from 
homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN, SERVICE_TYPE_DEVICE_NAMES -from .coordinator import EasyEnergyData, EasyEnergyDataUpdateCoordinator +from .coordinator import ( + EasyEnergyConfigEntry, + EasyEnergyData, + EasyEnergyDataUpdateCoordinator, +) @dataclass(frozen=True, kw_only=True) @@ -208,10 +211,12 @@ def get_gas_price(data: EasyEnergyData, hours: int) -> float | None: async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: EasyEnergyConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up easyEnergy sensors based on a config entry.""" - coordinator: EasyEnergyDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( EasyEnergySensorEntity(coordinator=coordinator, description=description) for description in SENSORS diff --git a/homeassistant/components/easyenergy/services.py b/homeassistant/components/easyenergy/services.py index 5b80cfafd08..cb5424496ac 100644 --- a/homeassistant/components/easyenergy/services.py +++ b/homeassistant/components/easyenergy/services.py @@ -10,7 +10,7 @@ from typing import Final from easyenergy import Electricity, Gas, VatOption import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import ( HomeAssistant, ServiceCall, @@ -23,7 +23,7 @@ from homeassistant.helpers import selector from homeassistant.util import dt as dt_util from .const import DOMAIN -from .coordinator import EasyEnergyDataUpdateCoordinator +from .coordinator import EasyEnergyConfigEntry, EasyEnergyDataUpdateCoordinator ATTR_CONFIG_ENTRY: Final = "config_entry" ATTR_START: Final = "start" @@ -91,7 +91,7 @@ def __get_coordinator( ) -> EasyEnergyDataUpdateCoordinator: """Get the coordinator from the entry.""" entry_id: str = 
call.data[ATTR_CONFIG_ENTRY] - entry: ConfigEntry | None = hass.config_entries.async_get_entry(entry_id) + entry: EasyEnergyConfigEntry | None = hass.config_entries.async_get_entry(entry_id) if not entry: raise ServiceValidationError( @@ -110,8 +110,7 @@ def __get_coordinator( }, ) - coordinator: EasyEnergyDataUpdateCoordinator = hass.data[DOMAIN][entry_id] - return coordinator + return entry.runtime_data async def __get_prices( diff --git a/tests/components/easyenergy/test_init.py b/tests/components/easyenergy/test_init.py index 74293049fd1..c3c917bc9ed 100644 --- a/tests/components/easyenergy/test_init.py +++ b/tests/components/easyenergy/test_init.py @@ -4,7 +4,6 @@ from unittest.mock import MagicMock, patch from easyenergy import EasyEnergyConnectionError -from homeassistant.components.easyenergy.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -24,7 +23,6 @@ async def test_load_unload_config_entry( await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() - assert not hass.data.get(DOMAIN) assert mock_config_entry.state is ConfigEntryState.NOT_LOADED From 32c1b519ad1940659eabd5e78fde831fb3243946 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:14:56 +0100 Subject: [PATCH 141/677] Improve auth generic typing (#133061) --- homeassistant/auth/__init__.py | 2 +- homeassistant/auth/mfa_modules/__init__.py | 18 ++++++++++++++---- homeassistant/auth/mfa_modules/notify.py | 6 ++---- homeassistant/auth/mfa_modules/totp.py | 5 ++--- homeassistant/auth/providers/__init__.py | 14 ++++++++++---- homeassistant/auth/providers/command_line.py | 14 ++++++++------ homeassistant/auth/providers/homeassistant.py | 6 +++--- .../auth/providers/insecure_example.py | 9 +++++---- .../auth/providers/trusted_networks.py | 10 +++++----- 9 files changed, 50 insertions(+), 34 deletions(-) diff --git 
a/homeassistant/auth/__init__.py b/homeassistant/auth/__init__.py index 21a4b6113d0..afe3b2d7aa3 100644 --- a/homeassistant/auth/__init__.py +++ b/homeassistant/auth/__init__.py @@ -115,7 +115,7 @@ class AuthManagerFlowManager( *, context: AuthFlowContext | None = None, data: dict[str, Any] | None = None, - ) -> LoginFlow: + ) -> LoginFlow[Any]: """Create a login flow.""" auth_provider = self.auth_manager.get_auth_provider(*handler_key) if not auth_provider: diff --git a/homeassistant/auth/mfa_modules/__init__.py b/homeassistant/auth/mfa_modules/__init__.py index d57a274c7ff..8a6430d770a 100644 --- a/homeassistant/auth/mfa_modules/__init__.py +++ b/homeassistant/auth/mfa_modules/__init__.py @@ -4,8 +4,9 @@ from __future__ import annotations import logging import types -from typing import Any +from typing import Any, Generic +from typing_extensions import TypeVar import voluptuous as vol from voluptuous.humanize import humanize_error @@ -34,6 +35,12 @@ DATA_REQS: HassKey[set[str]] = HassKey("mfa_auth_module_reqs_processed") _LOGGER = logging.getLogger(__name__) +_MultiFactorAuthModuleT = TypeVar( + "_MultiFactorAuthModuleT", + bound="MultiFactorAuthModule", + default="MultiFactorAuthModule", +) + class MultiFactorAuthModule: """Multi-factor Auth Module of validation function.""" @@ -71,7 +78,7 @@ class MultiFactorAuthModule: """Return a voluptuous schema to define mfa auth module's input.""" raise NotImplementedError - async def async_setup_flow(self, user_id: str) -> SetupFlow: + async def async_setup_flow(self, user_id: str) -> SetupFlow[Any]: """Return a data entry flow handler for setup module. 
Mfa module should extend SetupFlow @@ -95,11 +102,14 @@ class MultiFactorAuthModule: raise NotImplementedError -class SetupFlow(data_entry_flow.FlowHandler): +class SetupFlow(data_entry_flow.FlowHandler, Generic[_MultiFactorAuthModuleT]): """Handler for the setup flow.""" def __init__( - self, auth_module: MultiFactorAuthModule, setup_schema: vol.Schema, user_id: str + self, + auth_module: _MultiFactorAuthModuleT, + setup_schema: vol.Schema, + user_id: str, ) -> None: """Initialize the setup flow.""" self._auth_module = auth_module diff --git a/homeassistant/auth/mfa_modules/notify.py b/homeassistant/auth/mfa_modules/notify.py index d2010dc2c9d..b60a3012aac 100644 --- a/homeassistant/auth/mfa_modules/notify.py +++ b/homeassistant/auth/mfa_modules/notify.py @@ -162,7 +162,7 @@ class NotifyAuthModule(MultiFactorAuthModule): return sorted(unordered_services) - async def async_setup_flow(self, user_id: str) -> SetupFlow: + async def async_setup_flow(self, user_id: str) -> NotifySetupFlow: """Return a data entry flow handler for setup module. 
Mfa module should extend SetupFlow @@ -268,7 +268,7 @@ class NotifyAuthModule(MultiFactorAuthModule): await self.hass.services.async_call("notify", notify_service, data) -class NotifySetupFlow(SetupFlow): +class NotifySetupFlow(SetupFlow[NotifyAuthModule]): """Handler for the setup flow.""" def __init__( @@ -280,8 +280,6 @@ class NotifySetupFlow(SetupFlow): ) -> None: """Initialize the setup flow.""" super().__init__(auth_module, setup_schema, user_id) - # to fix typing complaint - self._auth_module: NotifyAuthModule = auth_module self._available_notify_services = available_notify_services self._secret: str | None = None self._count: int | None = None diff --git a/homeassistant/auth/mfa_modules/totp.py b/homeassistant/auth/mfa_modules/totp.py index 3306f76217f..625b273f39a 100644 --- a/homeassistant/auth/mfa_modules/totp.py +++ b/homeassistant/auth/mfa_modules/totp.py @@ -114,7 +114,7 @@ class TotpAuthModule(MultiFactorAuthModule): self._users[user_id] = ota_secret # type: ignore[index] return ota_secret - async def async_setup_flow(self, user_id: str) -> SetupFlow: + async def async_setup_flow(self, user_id: str) -> TotpSetupFlow: """Return a data entry flow handler for setup module. 
Mfa module should extend SetupFlow @@ -174,10 +174,9 @@ class TotpAuthModule(MultiFactorAuthModule): return bool(pyotp.TOTP(ota_secret).verify(code, valid_window=1)) -class TotpSetupFlow(SetupFlow): +class TotpSetupFlow(SetupFlow[TotpAuthModule]): """Handler for the setup flow.""" - _auth_module: TotpAuthModule _ota_secret: str _url: str _image: str diff --git a/homeassistant/auth/providers/__init__.py b/homeassistant/auth/providers/__init__.py index 34278c47df7..02f99e7bd71 100644 --- a/homeassistant/auth/providers/__init__.py +++ b/homeassistant/auth/providers/__init__.py @@ -5,8 +5,9 @@ from __future__ import annotations from collections.abc import Mapping import logging import types -from typing import Any +from typing import Any, Generic +from typing_extensions import TypeVar import voluptuous as vol from voluptuous.humanize import humanize_error @@ -46,6 +47,8 @@ AUTH_PROVIDER_SCHEMA = vol.Schema( extra=vol.ALLOW_EXTRA, ) +_AuthProviderT = TypeVar("_AuthProviderT", bound="AuthProvider", default="AuthProvider") + class AuthProvider: """Provider of user authentication.""" @@ -105,7 +108,7 @@ class AuthProvider: # Implement by extending class - async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow: + async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow[Any]: """Return the data flow for logging in with auth provider. Auth provider should extend LoginFlow and return an instance. 
@@ -192,12 +195,15 @@ async def load_auth_provider_module( return module -class LoginFlow(FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]]): +class LoginFlow( + FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]], + Generic[_AuthProviderT], +): """Handler for the login flow.""" _flow_result = AuthFlowResult - def __init__(self, auth_provider: AuthProvider) -> None: + def __init__(self, auth_provider: _AuthProviderT) -> None: """Initialize the login flow.""" self._auth_provider = auth_provider self._auth_module_id: str | None = None diff --git a/homeassistant/auth/providers/command_line.py b/homeassistant/auth/providers/command_line.py index 12447bc8c18..74630d925e1 100644 --- a/homeassistant/auth/providers/command_line.py +++ b/homeassistant/auth/providers/command_line.py @@ -6,7 +6,7 @@ import asyncio from collections.abc import Mapping import logging import os -from typing import Any, cast +from typing import Any import voluptuous as vol @@ -59,7 +59,9 @@ class CommandLineAuthProvider(AuthProvider): super().__init__(*args, **kwargs) self._user_meta: dict[str, dict[str, Any]] = {} - async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow: + async def async_login_flow( + self, context: AuthFlowContext | None + ) -> CommandLineLoginFlow: """Return a flow to login.""" return CommandLineLoginFlow(self) @@ -133,7 +135,7 @@ class CommandLineAuthProvider(AuthProvider): ) -class CommandLineLoginFlow(LoginFlow): +class CommandLineLoginFlow(LoginFlow[CommandLineAuthProvider]): """Handler for the login flow.""" async def async_step_init( @@ -145,9 +147,9 @@ class CommandLineLoginFlow(LoginFlow): if user_input is not None: user_input["username"] = user_input["username"].strip() try: - await cast( - CommandLineAuthProvider, self._auth_provider - ).async_validate_login(user_input["username"], user_input["password"]) + await self._auth_provider.async_validate_login( + user_input["username"], user_input["password"] + ) except 
InvalidAuthError: errors["base"] = "invalid_auth" diff --git a/homeassistant/auth/providers/homeassistant.py b/homeassistant/auth/providers/homeassistant.py index e5dded74762..522e5d77a29 100644 --- a/homeassistant/auth/providers/homeassistant.py +++ b/homeassistant/auth/providers/homeassistant.py @@ -305,7 +305,7 @@ class HassAuthProvider(AuthProvider): await data.async_load() self.data = data - async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow: + async def async_login_flow(self, context: AuthFlowContext | None) -> HassLoginFlow: """Return a flow to login.""" return HassLoginFlow(self) @@ -400,7 +400,7 @@ class HassAuthProvider(AuthProvider): pass -class HassLoginFlow(LoginFlow): +class HassLoginFlow(LoginFlow[HassAuthProvider]): """Handler for the login flow.""" async def async_step_init( @@ -411,7 +411,7 @@ class HassLoginFlow(LoginFlow): if user_input is not None: try: - await cast(HassAuthProvider, self._auth_provider).async_validate_login( + await self._auth_provider.async_validate_login( user_input["username"], user_input["password"] ) except InvalidAuth: diff --git a/homeassistant/auth/providers/insecure_example.py b/homeassistant/auth/providers/insecure_example.py index a7dced851a3..a92f5b55848 100644 --- a/homeassistant/auth/providers/insecure_example.py +++ b/homeassistant/auth/providers/insecure_example.py @@ -4,7 +4,6 @@ from __future__ import annotations from collections.abc import Mapping import hmac -from typing import cast import voluptuous as vol @@ -36,7 +35,9 @@ class InvalidAuthError(HomeAssistantError): class ExampleAuthProvider(AuthProvider): """Example auth provider based on hardcoded usernames and passwords.""" - async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow: + async def async_login_flow( + self, context: AuthFlowContext | None + ) -> ExampleLoginFlow: """Return a flow to login.""" return ExampleLoginFlow(self) @@ -93,7 +94,7 @@ class ExampleAuthProvider(AuthProvider): return 
UserMeta(name=name, is_active=True) -class ExampleLoginFlow(LoginFlow): +class ExampleLoginFlow(LoginFlow[ExampleAuthProvider]): """Handler for the login flow.""" async def async_step_init( @@ -104,7 +105,7 @@ class ExampleLoginFlow(LoginFlow): if user_input is not None: try: - cast(ExampleAuthProvider, self._auth_provider).async_validate_login( + self._auth_provider.async_validate_login( user_input["username"], user_input["password"] ) except InvalidAuthError: diff --git a/homeassistant/auth/providers/trusted_networks.py b/homeassistant/auth/providers/trusted_networks.py index f32c35d4bd5..799fd4d2e16 100644 --- a/homeassistant/auth/providers/trusted_networks.py +++ b/homeassistant/auth/providers/trusted_networks.py @@ -104,7 +104,9 @@ class TrustedNetworksAuthProvider(AuthProvider): """Trusted Networks auth provider does not support MFA.""" return False - async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow: + async def async_login_flow( + self, context: AuthFlowContext | None + ) -> TrustedNetworksLoginFlow: """Return a flow to login.""" assert context is not None ip_addr = cast(IPAddress, context.get("ip_address")) @@ -214,7 +216,7 @@ class TrustedNetworksAuthProvider(AuthProvider): self.async_validate_access(ip_address(remote_ip)) -class TrustedNetworksLoginFlow(LoginFlow): +class TrustedNetworksLoginFlow(LoginFlow[TrustedNetworksAuthProvider]): """Handler for the login flow.""" def __init__( @@ -235,9 +237,7 @@ class TrustedNetworksLoginFlow(LoginFlow): ) -> AuthFlowResult: """Handle the step of the form.""" try: - cast( - TrustedNetworksAuthProvider, self._auth_provider - ).async_validate_access(self._ip_address) + self._auth_provider.async_validate_access(self._ip_address) except InvalidAuthError: return self.async_abort(reason="not_allowed") From ad15786115673c5b3fe40ea2f5d61b4b896f433e Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 12 Dec 2024 20:16:18 +0100 Subject: [PATCH 142/677] Add support for subentries to config 
entries (#117355) * Add support for subentries to config entries * Improve error handling and test coverage * Include subentry_id in subentry containers * Auto-generate subentry_id and add optional unique_id * Tweak * Update tests * Fix stale docstring * Address review comments * Typing tweaks * Add methods to ConfigEntries to add and remove subentry * Improve ConfigSubentryData typed dict * Update test snapshots * Adjust tests * Fix unique_id logic * Allow multiple subentries with None unique_id * Add number of subentries to config entry JSON representation * Add subentry translation support * Allow integrations to implement multiple subentry flows * Update translations schema * Adjust exception text * Change subentry flow init step to user * Prevent creating a subentry with colliding unique_id * Update tests * Address review comments * Remove duplicaetd unique_id collision check * Remove change from the future * Improve test coverage * Add default value for unique_id --- .../components/config/config_entries.py | 126 ++++ homeassistant/config_entries.py | 315 ++++++++- homeassistant/helpers/data_entry_flow.py | 4 +- script/hassfest/translations.py | 9 + tests/common.py | 2 + .../aemet/snapshots/test_diagnostics.ambr | 2 + .../airly/snapshots/test_diagnostics.ambr | 2 + .../airnow/snapshots/test_diagnostics.ambr | 2 + .../airvisual/snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../airzone/snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../axis/snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../blink/snapshots/test_diagnostics.ambr | 2 + .../braviatv/snapshots/test_diagnostics.ambr | 2 + .../co2signal/snapshots/test_diagnostics.ambr | 2 + .../coinbase/snapshots/test_diagnostics.ambr | 2 + .../comelit/snapshots/test_diagnostics.ambr | 4 + .../components/config/test_config_entries.py | 469 +++++++++++++ 
.../deconz/snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../ecovacs/snapshots/test_diagnostics.ambr | 4 + .../snapshots/test_config_flow.ambr | 4 + .../snapshots/test_diagnostics.ambr | 6 + .../esphome/snapshots/test_diagnostics.ambr | 2 + tests/components/esphome/test_diagnostics.py | 1 + .../forecast_solar/snapshots/test_init.ambr | 2 + .../fritz/snapshots/test_diagnostics.ambr | 2 + .../fronius/snapshots/test_diagnostics.ambr | 2 + .../fyta/snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_config_flow.ambr | 8 + .../gios/snapshots/test_diagnostics.ambr | 2 + .../goodwe/snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + tests/components/guardian/test_diagnostics.py | 1 + .../snapshots/test_config_flow.ambr | 16 + .../snapshots/test_diagnostics.ambr | 2 + .../imgw_pib/snapshots/test_diagnostics.ambr | 2 + .../iqvia/snapshots/test_diagnostics.ambr | 2 + .../kostal_plenticore/test_diagnostics.py | 1 + .../snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../madvr/snapshots/test_diagnostics.ambr | 2 + .../melcloud/snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../netatmo/snapshots/test_diagnostics.ambr | 2 + .../nextdns/snapshots/test_diagnostics.ambr | 2 + .../nice_go/snapshots/test_diagnostics.ambr | 2 + tests/components/notion/test_diagnostics.py | 1 + .../onvif/snapshots/test_diagnostics.ambr | 2 + tests/components/openuv/test_diagnostics.py | 1 + .../p1_monitor/snapshots/test_init.ambr | 4 + .../snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../components/philips_js/test_config_flow.py | 1 + .../pi_hole/snapshots/test_diagnostics.ambr | 2 + .../proximity/snapshots/test_diagnostics.ambr | 2 + tests/components/ps4/test_init.py | 1 + 
.../components/purpleair/test_diagnostics.py | 1 + .../snapshots/test_diagnostics.ambr | 4 + .../snapshots/test_diagnostics.ambr | 4 + .../recollect_waste/test_diagnostics.py | 1 + .../ridwell/snapshots/test_diagnostics.ambr | 2 + .../components/samsungtv/test_diagnostics.py | 3 + .../snapshots/test_diagnostics.ambr | 2 + .../components/simplisafe/test_diagnostics.py | 1 + .../solarlog/snapshots/test_diagnostics.ambr | 2 + tests/components/subaru/test_config_flow.py | 2 + .../switcher_kis/test_diagnostics.py | 1 + .../snapshots/test_diagnostics.ambr | 4 + .../snapshots/test_diagnostics.ambr | 2 + .../tractive/snapshots/test_diagnostics.ambr | 2 + .../tuya/snapshots/test_config_flow.ambr | 8 + .../twinkly/snapshots/test_diagnostics.ambr | 2 + .../unifi/snapshots/test_diagnostics.ambr | 2 + .../uptime/snapshots/test_config_flow.ambr | 4 + .../snapshots/test_diagnostics.ambr | 2 + .../v2c/snapshots/test_diagnostics.ambr | 2 + .../vicare/snapshots/test_diagnostics.ambr | 2 + .../snapshots/test_diagnostics.ambr | 2 + .../watttime/snapshots/test_diagnostics.ambr | 2 + .../webmin/snapshots/test_diagnostics.ambr | 2 + tests/components/webostv/test_diagnostics.py | 1 + .../whirlpool/snapshots/test_diagnostics.ambr | 2 + .../whois/snapshots/test_config_flow.ambr | 20 + .../workday/snapshots/test_diagnostics.ambr | 2 + .../wyoming/snapshots/test_config_flow.ambr | 12 + .../zha/snapshots/test_diagnostics.ambr | 2 + tests/snapshots/test_config_entries.ambr | 2 + tests/test_config_entries.py | 637 +++++++++++++++++- 95 files changed, 1771 insertions(+), 30 deletions(-) diff --git a/homeassistant/components/config/config_entries.py b/homeassistant/components/config/config_entries.py index da50f7e93a1..5794819995d 100644 --- a/homeassistant/components/config/config_entries.py +++ b/homeassistant/components/config/config_entries.py @@ -46,6 +46,13 @@ def async_setup(hass: HomeAssistant) -> bool: hass.http.register_view(OptionManagerFlowIndexView(hass.config_entries.options)) 
hass.http.register_view(OptionManagerFlowResourceView(hass.config_entries.options)) + hass.http.register_view( + SubentryManagerFlowIndexView(hass.config_entries.subentries) + ) + hass.http.register_view( + SubentryManagerFlowResourceView(hass.config_entries.subentries) + ) + websocket_api.async_register_command(hass, config_entries_get) websocket_api.async_register_command(hass, config_entry_disable) websocket_api.async_register_command(hass, config_entry_get_single) @@ -54,6 +61,9 @@ def async_setup(hass: HomeAssistant) -> bool: websocket_api.async_register_command(hass, config_entries_progress) websocket_api.async_register_command(hass, ignore_config_flow) + websocket_api.async_register_command(hass, config_subentry_delete) + websocket_api.async_register_command(hass, config_subentry_list) + return True @@ -285,6 +295,63 @@ class OptionManagerFlowResourceView( return await super().post(request, flow_id) +class SubentryManagerFlowIndexView( + FlowManagerIndexView[config_entries.ConfigSubentryFlowManager] +): + """View to create subentry flows.""" + + url = "/api/config/config_entries/subentries/flow" + name = "api:config:config_entries:subentries:flow" + + @require_admin( + error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT) + ) + @RequestDataValidator( + vol.Schema( + { + vol.Required("handler"): vol.All(vol.Coerce(tuple), (str, str)), + vol.Optional("show_advanced_options", default=False): cv.boolean, + }, + extra=vol.ALLOW_EXTRA, + ) + ) + async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response: + """Handle a POST request. + + handler in request is [entry_id, subentry_type]. 
+ """ + return await super()._post_impl(request, data) + + def get_context(self, data: dict[str, Any]) -> dict[str, Any]: + """Return context.""" + context = super().get_context(data) + context["source"] = config_entries.SOURCE_USER + return context + + +class SubentryManagerFlowResourceView( + FlowManagerResourceView[config_entries.ConfigSubentryFlowManager] +): + """View to interact with the subentry flow manager.""" + + url = "/api/config/config_entries/subentries/flow/{flow_id}" + name = "api:config:config_entries:subentries:flow:resource" + + @require_admin( + error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT) + ) + async def get(self, request: web.Request, /, flow_id: str) -> web.Response: + """Get the current state of a data_entry_flow.""" + return await super().get(request, flow_id) + + @require_admin( + error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT) + ) + async def post(self, request: web.Request, flow_id: str) -> web.Response: + """Handle a POST request.""" + return await super().post(request, flow_id) + + @websocket_api.require_admin @websocket_api.websocket_command({"type": "config_entries/flow/progress"}) def config_entries_progress( @@ -588,3 +655,62 @@ async def _async_matching_config_entries_json_fragments( ) or (filter_is_not_helper and entry.domain not in integrations) ] + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + "type": "config_entries/subentries/list", + "entry_id": str, + } +) +@websocket_api.async_response +async def config_subentry_list( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """List subentries of a config entry.""" + entry = get_entry(hass, connection, msg["entry_id"], msg["id"]) + if entry is None: + return + + result = [ + { + "subentry_id": subentry.subentry_id, + "title": subentry.title, + "unique_id": subentry.unique_id, + } + for subentry_id, subentry in entry.subentries.items() + ] 
+ connection.send_result(msg["id"], result) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + "type": "config_entries/subentries/delete", + "entry_id": str, + "subentry_id": str, + } +) +@websocket_api.async_response +async def config_subentry_delete( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Delete a subentry of a config entry.""" + entry = get_entry(hass, connection, msg["entry_id"], msg["id"]) + if entry is None: + return + + try: + hass.config_entries.async_remove_subentry(entry, msg["subentry_id"]) + except config_entries.UnknownSubEntry: + connection.send_error( + msg["id"], websocket_api.const.ERR_NOT_FOUND, "Config subentry not found" + ) + return + + connection.send_result(msg["id"]) diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index ade4cd855ca..d34828f5e46 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -15,6 +15,7 @@ from collections.abc import ( ) from contextvars import ContextVar from copy import deepcopy +from dataclasses import dataclass, field from datetime import datetime from enum import Enum, StrEnum import functools @@ -22,7 +23,7 @@ from functools import cache import logging from random import randint from types import MappingProxyType -from typing import TYPE_CHECKING, Any, Generic, Self, cast +from typing import TYPE_CHECKING, Any, Generic, Self, TypedDict, cast from async_interrupt import interrupt from propcache import cached_property @@ -128,7 +129,7 @@ HANDLERS: Registry[str, type[ConfigFlow]] = Registry() STORAGE_KEY = "core.config_entries" STORAGE_VERSION = 1 -STORAGE_VERSION_MINOR = 4 +STORAGE_VERSION_MINOR = 5 SAVE_DELAY = 1 @@ -256,6 +257,10 @@ class UnknownEntry(ConfigError): """Unknown entry specified.""" +class UnknownSubEntry(ConfigError): + """Unknown subentry specified.""" + + class OperationNotAllowed(ConfigError): """Raised when a config entry operation is not 
allowed.""" @@ -300,6 +305,7 @@ class ConfigFlowResult(FlowResult[ConfigFlowContext, str], total=False): minor_version: int options: Mapping[str, Any] + subentries: Iterable[ConfigSubentryData] version: int @@ -313,6 +319,51 @@ def _validate_item(*, disabled_by: ConfigEntryDisabler | Any | None = None) -> N ) +class ConfigSubentryData(TypedDict): + """Container for configuration subentry data. + + Returned by integrations, a subentry_id will be assigned automatically. + """ + + data: Mapping[str, Any] + title: str + unique_id: str | None + + +class ConfigSubentryDataWithId(ConfigSubentryData): + """Container for configuration subentry data. + + This type is used when loading existing subentries from storage. + """ + + subentry_id: str + + +class SubentryFlowResult(FlowResult[FlowContext, tuple[str, str]], total=False): + """Typed result dict for subentry flow.""" + + unique_id: str | None + + +@dataclass(frozen=True, kw_only=True) +class ConfigSubentry: + """Container for a configuration subentry.""" + + data: MappingProxyType[str, Any] + subentry_id: str = field(default_factory=ulid_util.ulid_now) + title: str + unique_id: str | None + + def as_dict(self) -> ConfigSubentryDataWithId: + """Return dictionary version of this subentry.""" + return { + "data": dict(self.data), + "subentry_id": self.subentry_id, + "title": self.title, + "unique_id": self.unique_id, + } + + class ConfigEntry(Generic[_DataT]): """Hold a configuration entry.""" @@ -322,6 +373,7 @@ class ConfigEntry(Generic[_DataT]): data: MappingProxyType[str, Any] runtime_data: _DataT options: MappingProxyType[str, Any] + subentries: MappingProxyType[str, ConfigSubentry] unique_id: str | None state: ConfigEntryState reason: str | None @@ -337,6 +389,7 @@ class ConfigEntry(Generic[_DataT]): supports_remove_device: bool | None _supports_options: bool | None _supports_reconfigure: bool | None + _supported_subentries: tuple[str, ...] 
| None update_listeners: list[UpdateListenerType] _async_cancel_retry_setup: Callable[[], Any] | None _on_unload: list[Callable[[], Coroutine[Any, Any, None] | None]] | None @@ -366,6 +419,7 @@ class ConfigEntry(Generic[_DataT]): pref_disable_polling: bool | None = None, source: str, state: ConfigEntryState = ConfigEntryState.NOT_LOADED, + subentries_data: Iterable[ConfigSubentryData | ConfigSubentryDataWithId] | None, title: str, unique_id: str | None, version: int, @@ -391,6 +445,24 @@ class ConfigEntry(Generic[_DataT]): # Entry options _setter(self, "options", MappingProxyType(options or {})) + # Subentries + subentries_data = subentries_data or () + subentries = {} + for subentry_data in subentries_data: + subentry_kwargs = {} + if "subentry_id" in subentry_data: + # If subentry_data has key "subentry_id", we're loading from storage + subentry_kwargs["subentry_id"] = subentry_data["subentry_id"] # type: ignore[typeddict-item] + subentry = ConfigSubentry( + data=MappingProxyType(subentry_data["data"]), + title=subentry_data["title"], + unique_id=subentry_data.get("unique_id"), + **subentry_kwargs, + ) + subentries[subentry.subentry_id] = subentry + + _setter(self, "subentries", MappingProxyType(subentries)) + # Entry system options if pref_disable_new_entities is None: pref_disable_new_entities = False @@ -427,6 +499,9 @@ class ConfigEntry(Generic[_DataT]): # Supports reconfigure _setter(self, "_supports_reconfigure", None) + # Supports subentries + _setter(self, "_supported_subentries", None) + # Listeners to call on update _setter(self, "update_listeners", []) @@ -499,6 +574,18 @@ class ConfigEntry(Generic[_DataT]): ) return self._supports_reconfigure or False + @property + def supported_subentries(self) -> tuple[str, ...]: + """Return supported subentries.""" + if self._supported_subentries is None and ( + handler := HANDLERS.get(self.domain) + ): + # work out sub entries supported by the handler + object.__setattr__( + self, "_supported_subentries", 
handler.async_supported_subentries(self) + ) + return self._supported_subentries or () + def clear_state_cache(self) -> None: """Clear cached properties that are included in as_json_fragment.""" self.__dict__.pop("as_json_fragment", None) @@ -518,12 +605,14 @@ class ConfigEntry(Generic[_DataT]): "supports_remove_device": self.supports_remove_device or False, "supports_unload": self.supports_unload or False, "supports_reconfigure": self.supports_reconfigure, + "supported_subentries": self.supported_subentries, "pref_disable_new_entities": self.pref_disable_new_entities, "pref_disable_polling": self.pref_disable_polling, "disabled_by": self.disabled_by, "reason": self.reason, "error_reason_translation_key": self.error_reason_translation_key, "error_reason_translation_placeholders": self.error_reason_translation_placeholders, + "num_subentries": len(self.subentries), } return json_fragment(json_bytes(json_repr)) @@ -1018,6 +1107,7 @@ class ConfigEntry(Generic[_DataT]): "pref_disable_new_entities": self.pref_disable_new_entities, "pref_disable_polling": self.pref_disable_polling, "source": self.source, + "subentries": [subentry.as_dict() for subentry in self.subentries.values()], "title": self.title, "unique_id": self.unique_id, "version": self.version, @@ -1503,6 +1593,7 @@ class ConfigEntriesFlowManager( minor_version=result["minor_version"], options=result["options"], source=flow.context["source"], + subentries_data=result["subentries"], title=result["title"], unique_id=flow.unique_id, version=result["version"], @@ -1793,6 +1884,11 @@ class ConfigEntryStore(storage.Store[dict[str, list[dict[str, Any]]]]): for entry in data["entries"]: entry["discovery_keys"] = {} + if old_minor_version < 5: + # Version 1.5 adds config subentries + for entry in data["entries"]: + entry.setdefault("subentries", entry.get("subentries", {})) + if old_major_version > 1: raise NotImplementedError return data @@ -1809,6 +1905,7 @@ class ConfigEntries: self.hass = hass self.flow =
ConfigEntriesFlowManager(hass, self, hass_config) self.options = OptionsFlowManager(hass) + self.subentries = ConfigSubentryFlowManager(hass) self._hass_config = hass_config self._entries = ConfigEntryItems(hass) self._store = ConfigEntryStore(hass) @@ -2011,6 +2108,7 @@ class ConfigEntries: pref_disable_new_entities=entry["pref_disable_new_entities"], pref_disable_polling=entry["pref_disable_polling"], source=entry["source"], + subentries_data=entry["subentries"], title=entry["title"], unique_id=entry["unique_id"], version=entry["version"], @@ -2170,6 +2268,44 @@ class ConfigEntries: If the entry was changed, the update_listeners are fired and this function returns True + If the entry was not changed, the update_listeners are + not fired and this function returns False + """ + return self._async_update_entry( + entry, + data=data, + discovery_keys=discovery_keys, + minor_version=minor_version, + options=options, + pref_disable_new_entities=pref_disable_new_entities, + pref_disable_polling=pref_disable_polling, + title=title, + unique_id=unique_id, + version=version, + ) + + @callback + def _async_update_entry( + self, + entry: ConfigEntry, + *, + data: Mapping[str, Any] | UndefinedType = UNDEFINED, + discovery_keys: MappingProxyType[str, tuple[DiscoveryKey, ...]] + | UndefinedType = UNDEFINED, + minor_version: int | UndefinedType = UNDEFINED, + options: Mapping[str, Any] | UndefinedType = UNDEFINED, + pref_disable_new_entities: bool | UndefinedType = UNDEFINED, + pref_disable_polling: bool | UndefinedType = UNDEFINED, + subentries: dict[str, ConfigSubentry] | UndefinedType = UNDEFINED, + title: str | UndefinedType = UNDEFINED, + unique_id: str | None | UndefinedType = UNDEFINED, + version: int | UndefinedType = UNDEFINED, + ) -> bool: + """Update a config entry. 
+ + If the entry was changed, the update_listeners are + fired and this function returns True + If the entry was not changed, the update_listeners are not fired and this function returns False """ @@ -2232,6 +2368,11 @@ class ConfigEntries: changed = True _setter(entry, "options", MappingProxyType(options)) + if subentries is not UNDEFINED: + if entry.subentries != subentries: + changed = True + _setter(entry, "subentries", MappingProxyType(subentries)) + if not changed: return False @@ -2249,6 +2390,37 @@ class ConfigEntries: self._async_dispatch(ConfigEntryChange.UPDATED, entry) return True + @callback + def async_add_subentry(self, entry: ConfigEntry, subentry: ConfigSubentry) -> bool: + """Add a subentry to a config entry.""" + self._raise_if_subentry_unique_id_exists(entry, subentry.unique_id) + + return self._async_update_entry( + entry, + subentries=entry.subentries | {subentry.subentry_id: subentry}, + ) + + @callback + def async_remove_subentry(self, entry: ConfigEntry, subentry_id: str) -> bool: + """Remove a subentry from a config entry.""" + subentries = dict(entry.subentries) + try: + subentries.pop(subentry_id) + except KeyError as err: + raise UnknownSubEntry from err + + return self._async_update_entry(entry, subentries=subentries) + + def _raise_if_subentry_unique_id_exists( + self, entry: ConfigEntry, unique_id: str | None + ) -> None: + """Raise if a subentry with the same unique_id exists.""" + if unique_id is None: + return + for existing_subentry in entry.subentries.values(): + if existing_subentry.unique_id == unique_id: + raise data_entry_flow.AbortFlow("already_configured") + @callback def _async_dispatch( self, change_type: ConfigEntryChange, entry: ConfigEntry @@ -2585,6 +2757,20 @@ class ConfigFlow(ConfigEntryBaseFlow): """Return options flow support for this handler.""" return cls.async_get_options_flow is not ConfigFlow.async_get_options_flow + @staticmethod + @callback + def async_get_subentry_flow( + config_entry: ConfigEntry, 
subentry_type: str + ) -> ConfigSubentryFlow: + """Get the subentry flow for this handler.""" + raise NotImplementedError + + @classmethod + @callback + def async_supported_subentries(cls, config_entry: ConfigEntry) -> tuple[str, ...]: + """Return subentries supported by this handler.""" + return () + @callback def _async_abort_entries_match( self, match_dict: dict[str, Any] | None = None @@ -2893,6 +3079,7 @@ class ConfigFlow(ConfigEntryBaseFlow): description: str | None = None, description_placeholders: Mapping[str, str] | None = None, options: Mapping[str, Any] | None = None, + subentries: Iterable[ConfigSubentryData] | None = None, ) -> ConfigFlowResult: """Finish config flow and create a config entry.""" if self.source in {SOURCE_REAUTH, SOURCE_RECONFIGURE}: @@ -2912,6 +3099,7 @@ class ConfigFlow(ConfigEntryBaseFlow): result["minor_version"] = self.MINOR_VERSION result["options"] = options or {} + result["subentries"] = subentries or () result["version"] = self.VERSION return result @@ -3026,17 +3214,126 @@ class ConfigFlow(ConfigEntryBaseFlow): ) -class OptionsFlowManager( - data_entry_flow.FlowManager[ConfigFlowContext, ConfigFlowResult] -): - """Flow to set options for a configuration entry.""" +class _ConfigSubFlowManager: + """Mixin class for flow managers which manage flows tied to a config entry.""" - _flow_result = ConfigFlowResult + hass: HomeAssistant def _async_get_config_entry(self, config_entry_id: str) -> ConfigEntry: """Return config entry or raise if not found.""" return self.hass.config_entries.async_get_known_entry(config_entry_id) + +class ConfigSubentryFlowManager( + data_entry_flow.FlowManager[FlowContext, SubentryFlowResult, tuple[str, str]], + _ConfigSubFlowManager, +): + """Manage all the config subentry flows that are in progress.""" + + _flow_result = SubentryFlowResult + + async def async_create_flow( + self, + handler_key: tuple[str, str], + *, + context: FlowContext | None = None, + data: dict[str, Any] | None = None, + ) -> 
ConfigSubentryFlow: + """Create a subentry flow for a config entry. + + The entry_id and flow.handler[0] is the same thing to map entry with flow. + """ + if not context or "source" not in context: + raise KeyError("Context not set or doesn't have a source set") + + entry_id, subentry_type = handler_key + entry = self._async_get_config_entry(entry_id) + handler = await _async_get_flow_handler(self.hass, entry.domain, {}) + if subentry_type not in handler.async_supported_subentries(entry): + raise data_entry_flow.UnknownHandler( + f"Config entry '{entry.domain}' does not support subentry '{subentry_type}'" + ) + subentry_flow = handler.async_get_subentry_flow(entry, subentry_type) + subentry_flow.init_step = context["source"] + return subentry_flow + + async def async_finish_flow( + self, + flow: data_entry_flow.FlowHandler[ + FlowContext, SubentryFlowResult, tuple[str, str] + ], + result: SubentryFlowResult, + ) -> SubentryFlowResult: + """Finish a subentry flow and add a new subentry to the configuration entry. + + The flow.handler[0] and entry_id is the same thing to map flow with entry. 
+ """ + flow = cast(ConfigSubentryFlow, flow) + + if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY: + return result + + entry_id = flow.handler[0] + entry = self.hass.config_entries.async_get_entry(entry_id) + if entry is None: + raise UnknownEntry(entry_id) + + unique_id = result.get("unique_id") + if unique_id is not None and not isinstance(unique_id, str): + raise HomeAssistantError("unique_id must be a string") + + self.hass.config_entries.async_add_subentry( + entry, + ConfigSubentry( + data=MappingProxyType(result["data"]), + title=result["title"], + unique_id=unique_id, + ), + ) + + result["result"] = True + return result + + + class ConfigSubentryFlow( + data_entry_flow.FlowHandler[FlowContext, SubentryFlowResult, tuple[str, str]] + ): + """Base class for config subentry flows.""" + + _flow_result = SubentryFlowResult + handler: tuple[str, str] + + @callback + def async_create_entry( + self, + *, + title: str | None = None, + data: Mapping[str, Any], + description: str | None = None, + description_placeholders: Mapping[str, str] | None = None, + unique_id: str | None = None, + ) -> SubentryFlowResult: + """Finish subentry flow and create a config subentry.""" + result = super().async_create_entry( + title=title, + data=data, + description=description, + description_placeholders=description_placeholders, + ) + + result["unique_id"] = unique_id + + return result + + + class OptionsFlowManager( + data_entry_flow.FlowManager[ConfigFlowContext, ConfigFlowResult], + _ConfigSubFlowManager, + ): + """Manage all the config entry option flows that are in progress.""" + + _flow_result = ConfigFlowResult + async def async_create_flow( self, handler_key: str, @@ -3046,7 +3343,7 @@ class OptionsFlowManager( ) -> OptionsFlow: """Create an options flow for a config entry. - Entry_id and flow.handler is the same thing to map entry with flow. + The entry_id and the flow.handler is the same thing to map entry with flow.
""" entry = self._async_get_config_entry(handler_key) handler = await _async_get_flow_handler(self.hass, entry.domain, {}) @@ -3062,7 +3359,7 @@ class OptionsFlowManager( This method is called when a flow step returns FlowResultType.ABORT or FlowResultType.CREATE_ENTRY. - Flow.handler and entry_id is the same thing to map flow with entry. + The flow.handler and the entry_id is the same thing to map flow with entry. """ flow = cast(OptionsFlow, flow) diff --git a/homeassistant/helpers/data_entry_flow.py b/homeassistant/helpers/data_entry_flow.py index adb2062a8ea..e98061d50b7 100644 --- a/homeassistant/helpers/data_entry_flow.py +++ b/homeassistant/helpers/data_entry_flow.py @@ -18,7 +18,7 @@ from . import config_validation as cv _FlowManagerT = TypeVar( "_FlowManagerT", - bound=data_entry_flow.FlowManager[Any, Any], + bound=data_entry_flow.FlowManager[Any, Any, Any], default=data_entry_flow.FlowManager, ) @@ -71,7 +71,7 @@ class FlowManagerIndexView(_BaseFlowManagerView[_FlowManagerT]): async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response: """Initialize a POST request. 
- Override `_post_impl` in subclasses which need + Override `post` and call `_post_impl` in subclasses which need to implement their own `RequestDataValidator` """ return await self._post_impl(request, data) diff --git a/script/hassfest/translations.py b/script/hassfest/translations.py index 2fb70b6e0be..078c649666d 100644 --- a/script/hassfest/translations.py +++ b/script/hassfest/translations.py @@ -285,6 +285,15 @@ def gen_strings_schema(config: Config, integration: Integration) -> vol.Schema: "user" if integration.integration_type == "helper" else None ), ), + vol.Optional("config_subentries"): cv.schema_with_slug_keys( + gen_data_entry_schema( + config=config, + integration=integration, + flow_title=REQUIRED, + require_step_title=False, + ), + slug_validator=vol.Any("_", cv.slug), + ), vol.Optional("options"): gen_data_entry_schema( config=config, integration=integration, diff --git a/tests/common.py b/tests/common.py index ac6f10b8c44..d2b0dff8faa 100644 --- a/tests/common.py +++ b/tests/common.py @@ -1000,6 +1000,7 @@ class MockConfigEntry(config_entries.ConfigEntry): reason=None, source=config_entries.SOURCE_USER, state=None, + subentries_data=None, title="Mock Title", unique_id=None, version=1, @@ -1016,6 +1017,7 @@ class MockConfigEntry(config_entries.ConfigEntry): "options": options or {}, "pref_disable_new_entities": pref_disable_new_entities, "pref_disable_polling": pref_disable_polling, + "subentries_data": subentries_data or (), "title": title, "unique_id": unique_id, "version": version, diff --git a/tests/components/aemet/snapshots/test_diagnostics.ambr b/tests/components/aemet/snapshots/test_diagnostics.ambr index 54546507dfa..1e09a372352 100644 --- a/tests/components/aemet/snapshots/test_diagnostics.ambr +++ b/tests/components/aemet/snapshots/test_diagnostics.ambr @@ -21,6 +21,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 
'version': 1, diff --git a/tests/components/airly/snapshots/test_diagnostics.ambr b/tests/components/airly/snapshots/test_diagnostics.ambr index ec501b2fd7e..1c760eaec52 100644 --- a/tests/components/airly/snapshots/test_diagnostics.ambr +++ b/tests/components/airly/snapshots/test_diagnostics.ambr @@ -19,6 +19,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Home', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/airnow/snapshots/test_diagnostics.ambr b/tests/components/airnow/snapshots/test_diagnostics.ambr index 3dd4788dc61..73ba6a7123f 100644 --- a/tests/components/airnow/snapshots/test_diagnostics.ambr +++ b/tests/components/airnow/snapshots/test_diagnostics.ambr @@ -35,6 +35,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/airvisual/snapshots/test_diagnostics.ambr b/tests/components/airvisual/snapshots/test_diagnostics.ambr index 606d6082351..0dbdef1d508 100644 --- a/tests/components/airvisual/snapshots/test_diagnostics.ambr +++ b/tests/components/airvisual/snapshots/test_diagnostics.ambr @@ -47,6 +47,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 3, diff --git a/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr b/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr index cb1d3a7aee7..113db6e3b96 100644 --- a/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr +++ b/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr @@ -101,6 +101,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'XXXXXXX', 'version': 1, diff 
--git a/tests/components/airzone/snapshots/test_diagnostics.ambr b/tests/components/airzone/snapshots/test_diagnostics.ambr index fb4f6530b1e..39668e3d19f 100644 --- a/tests/components/airzone/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone/snapshots/test_diagnostics.ambr @@ -287,6 +287,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr index c6ad36916bf..4bd7bfaccdd 100644 --- a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr @@ -101,6 +101,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'installation1', 'version': 1, diff --git a/tests/components/ambient_station/snapshots/test_diagnostics.ambr b/tests/components/ambient_station/snapshots/test_diagnostics.ambr index 2f90b09d39f..07db19101ab 100644 --- a/tests/components/ambient_station/snapshots/test_diagnostics.ambr +++ b/tests/components/ambient_station/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/axis/snapshots/test_diagnostics.ambr b/tests/components/axis/snapshots/test_diagnostics.ambr index ebd0061f416..b475c796d2b 100644 --- a/tests/components/axis/snapshots/test_diagnostics.ambr +++ b/tests/components/axis/snapshots/test_diagnostics.ambr @@ -47,6 +47,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 
'version': 3, diff --git a/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr b/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr index e9540b5cec6..d7f9a045921 100644 --- a/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr +++ b/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr @@ -18,6 +18,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Beosound Balance-11111111', 'unique_id': '11111111', 'version': 1, diff --git a/tests/components/blink/snapshots/test_diagnostics.ambr b/tests/components/blink/snapshots/test_diagnostics.ambr index edc2879a66b..54df2b48cdb 100644 --- a/tests/components/blink/snapshots/test_diagnostics.ambr +++ b/tests/components/blink/snapshots/test_diagnostics.ambr @@ -48,6 +48,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 3, diff --git a/tests/components/braviatv/snapshots/test_diagnostics.ambr b/tests/components/braviatv/snapshots/test_diagnostics.ambr index cd29c647df7..de76c00cd23 100644 --- a/tests/components/braviatv/snapshots/test_diagnostics.ambr +++ b/tests/components/braviatv/snapshots/test_diagnostics.ambr @@ -19,6 +19,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'very_unique_string', 'version': 1, diff --git a/tests/components/co2signal/snapshots/test_diagnostics.ambr b/tests/components/co2signal/snapshots/test_diagnostics.ambr index 9218e7343ec..4159c8ec1a1 100644 --- a/tests/components/co2signal/snapshots/test_diagnostics.ambr +++ b/tests/components/co2signal/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, 
diff --git a/tests/components/coinbase/snapshots/test_diagnostics.ambr b/tests/components/coinbase/snapshots/test_diagnostics.ambr index 51bd946f140..3eab18fb9f3 100644 --- a/tests/components/coinbase/snapshots/test_diagnostics.ambr +++ b/tests/components/coinbase/snapshots/test_diagnostics.ambr @@ -44,6 +44,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, diff --git a/tests/components/comelit/snapshots/test_diagnostics.ambr b/tests/components/comelit/snapshots/test_diagnostics.ambr index 58ce74035f9..877f48a4611 100644 --- a/tests/components/comelit/snapshots/test_diagnostics.ambr +++ b/tests/components/comelit/snapshots/test_diagnostics.ambr @@ -71,6 +71,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, @@ -135,6 +137,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/config/test_config_entries.py b/tests/components/config/test_config_entries.py index 4a3bff47d89..4d37f3c871b 100644 --- a/tests/components/config/test_config_entries.py +++ b/tests/components/config/test_config_entries.py @@ -137,11 +137,13 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": core_ce.ConfigEntryState.NOT_LOADED.value, + "supported_subentries": [], "supports_options": True, "supports_reconfigure": False, "supports_remove_device": False, @@ -155,11 +157,13 @@ async def test_get_entries(hass: HomeAssistant, client: 
TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": core_ce.ConfigEntryState.SETUP_ERROR.value, + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -173,11 +177,13 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": core_ce.ConfigEntryState.NOT_LOADED.value, + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -191,11 +197,13 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": core_ce.ConfigEntryState.NOT_LOADED.value, + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -209,11 +217,13 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla5", "state": core_ce.ConfigEntryState.NOT_LOADED.value, + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -571,11 +581,13 @@ async def 
test_create_account(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": core_ce.SOURCE_USER, "state": core_ce.ConfigEntryState.LOADED.value, + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -586,6 +598,7 @@ async def test_create_account(hass: HomeAssistant, client: TestClient) -> None: "description_placeholders": None, "options": {}, "minor_version": 1, + "subentries": [], } @@ -654,11 +667,13 @@ async def test_two_step_flow(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": core_ce.SOURCE_USER, "state": core_ce.ConfigEntryState.LOADED.value, + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -669,6 +684,7 @@ async def test_two_step_flow(hass: HomeAssistant, client: TestClient) -> None: "description_placeholders": None, "options": {}, "minor_version": 1, + "subentries": [], } @@ -1088,6 +1104,273 @@ async def test_options_flow_with_invalid_data( assert data == {"errors": {"choices": "invalid is not a valid option"}} +async def test_subentry_flow(hass: HomeAssistant, client) -> None: + """Test we can start a subentry flow.""" + + class TestFlow(core_ce.ConfigFlow): + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + class SubentryFlowHandler(core_ce.ConfigSubentryFlow): + async def async_step_init(self, user_input=None): + raise NotImplementedError + + async def async_step_user(self, user_input=None): + schema = OrderedDict() + 
schema[vol.Required("enabled")] = bool + return self.async_show_form( + step_id="user", + data_schema=schema, + description_placeholders={"enabled": "Set to true to be true"}, + ) + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries(cls, config_entry): + return ("test",) + + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + MockConfigEntry( + domain="test", + entry_id="test1", + source="bla", + ).add_to_hass(hass) + entry = hass.config_entries.async_entries()[0] + + with patch.dict(HANDLERS, {"test": TestFlow}): + url = "/api/config/config_entries/subentries/flow" + resp = await client.post(url, json={"handler": [entry.entry_id, "test"]}) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + data.pop("flow_id") + assert data == { + "type": "form", + "handler": ["test1", "test"], + "step_id": "user", + "data_schema": [{"name": "enabled", "required": True, "type": "boolean"}], + "description_placeholders": {"enabled": "Set to true to be true"}, + "errors": None, + "last_step": None, + "preview": None, + } + + +@pytest.mark.parametrize( + ("endpoint", "method"), + [ + ("/api/config/config_entries/subentries/flow", "post"), + ("/api/config/config_entries/subentries/flow/1", "get"), + ("/api/config/config_entries/subentries/flow/1", "post"), + ], +) +async def test_subentry_flow_unauth( + hass: HomeAssistant, client, hass_admin_user: MockUser, endpoint: str, method: str +) -> None: + """Test unauthorized on subentry flow.""" + + class TestFlow(core_ce.ConfigFlow): + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + class SubentryFlowHandler(core_ce.ConfigSubentryFlow): + async def async_step_init(self, user_input=None): + schema = OrderedDict() + schema[vol.Required("enabled")] = bool + return self.async_show_form( + step_id="user", + data_schema=schema, + description_placeholders={"enabled": "Set to true to be true"}, + ) + + 
return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries(cls, config_entry): + return ("test",) + + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + MockConfigEntry( + domain="test", + entry_id="test1", + source="bla", + ).add_to_hass(hass) + entry = hass.config_entries.async_entries()[0] + + hass_admin_user.groups = [] + + with patch.dict(HANDLERS, {"test": TestFlow}): + resp = await getattr(client, method)(endpoint, json={"handler": entry.entry_id}) + + assert resp.status == HTTPStatus.UNAUTHORIZED + + +async def test_two_step_subentry_flow(hass: HomeAssistant, client) -> None: + """Test we can finish a two step subentry flow.""" + mock_integration( + hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True)) + ) + mock_platform(hass, "test.config_flow", None) + + class TestFlow(core_ce.ConfigFlow): + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + class SubentryFlowHandler(core_ce.ConfigSubentryFlow): + async def async_step_user(self, user_input=None): + return await self.async_step_finish() + + async def async_step_finish(self, user_input=None): + if user_input: + return self.async_create_entry( + title="Mock title", data=user_input, unique_id="test" + ) + + return self.async_show_form( + step_id="finish", data_schema=vol.Schema({"enabled": bool}) + ) + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries(cls, config_entry): + return ("test",) + + MockConfigEntry( + domain="test", + entry_id="test1", + source="bla", + ).add_to_hass(hass) + entry = hass.config_entries.async_entries()[0] + + with patch.dict(HANDLERS, {"test": TestFlow}): + url = "/api/config/config_entries/subentries/flow" + resp = await client.post(url, json={"handler": [entry.entry_id, "test"]}) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + flow_id = data["flow_id"] + expected_data = { + 
"data_schema": [{"name": "enabled", "type": "boolean"}], + "description_placeholders": None, + "errors": None, + "flow_id": flow_id, + "handler": ["test1", "test"], + "last_step": None, + "preview": None, + "step_id": "finish", + "type": "form", + } + assert data == expected_data + + resp = await client.get(f"/api/config/config_entries/subentries/flow/{flow_id}") + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data == expected_data + + resp = await client.post( + f"/api/config/config_entries/subentries/flow/{flow_id}", + json={"enabled": True}, + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + assert data == { + "description_placeholders": None, + "description": None, + "flow_id": flow_id, + "handler": ["test1", "test"], + "title": "Mock title", + "type": "create_entry", + "unique_id": "test", + } + + +async def test_subentry_flow_with_invalid_data(hass: HomeAssistant, client) -> None: + """Test a subentry flow with invalid_data.""" + mock_integration( + hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True)) + ) + mock_platform(hass, "test.config_flow", None) + + class TestFlow(core_ce.ConfigFlow): + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + class SubentryFlowHandler(core_ce.ConfigSubentryFlow): + async def async_step_user(self, user_input=None): + return self.async_show_form( + step_id="finish", + data_schema=vol.Schema( + { + vol.Required( + "choices", default=["invalid", "valid"] + ): cv.multi_select({"valid": "Valid"}) + } + ), + ) + + async def async_step_finish(self, user_input=None): + return self.async_create_entry( + title="Enable disable", data=user_input + ) + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries(cls, config_entry): + return ("test",) + + MockConfigEntry( + domain="test", + entry_id="test1", + source="bla", + ).add_to_hass(hass) + entry = hass.config_entries.async_entries()[0] + + with 
patch.dict(HANDLERS, {"test": TestFlow}): + url = "/api/config/config_entries/subentries/flow" + resp = await client.post(url, json={"handler": [entry.entry_id, "test"]}) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + flow_id = data.pop("flow_id") + assert data == { + "type": "form", + "handler": ["test1", "test"], + "step_id": "finish", + "data_schema": [ + { + "default": ["invalid", "valid"], + "name": "choices", + "options": {"valid": "Valid"}, + "required": True, + "type": "multi_select", + } + ], + "description_placeholders": None, + "errors": None, + "last_step": None, + "preview": None, + } + + with patch.dict(HANDLERS, {"test": TestFlow}): + resp = await client.post( + f"/api/config/config_entries/subentries/flow/{flow_id}", + json={"choices": ["valid", "invalid"]}, + ) + assert resp.status == HTTPStatus.BAD_REQUEST + data = await resp.json() + assert data == {"errors": {"choices": "invalid is not a valid option"}} + + @pytest.mark.usefixtures("freezer") async def test_get_single( hass: HomeAssistant, hass_ws_client: WebSocketGenerator @@ -1120,11 +1403,13 @@ async def test_get_single( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "user", "state": "loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1480,11 +1765,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1499,11 +1786,13 @@ async def 
test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": "setup_error", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1518,11 +1807,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1537,11 +1828,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1556,11 +1849,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla5", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1586,11 +1881,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, 
"pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1615,11 +1912,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1634,11 +1933,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla5", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1663,11 +1964,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1682,11 +1985,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1717,11 +2022,13 
@@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1736,11 +2043,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": "setup_error", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1755,11 +2064,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1774,11 +2085,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1793,11 +2106,13 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, + "num_subentries": 0, "pref_disable_new_entities": False, 
"pref_disable_polling": False, "reason": None, "source": "bla5", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1900,11 +2215,13 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1922,11 +2239,13 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": "setup_error", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1944,11 +2263,13 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1972,11 +2293,13 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2001,11 +2324,13 @@ 
async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2029,11 +2354,13 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": entry.modified_at.timestamp(), + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2119,11 +2446,13 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2141,11 +2470,13 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2171,11 +2502,13 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, + "num_subentries": 0, "pref_disable_new_entities": 
False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2197,11 +2530,13 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2227,11 +2562,13 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2255,11 +2592,13 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": entry.modified_at.timestamp(), + "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", + "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2470,3 +2809,133 @@ async def test_does_not_support_reconfigure( response == '{"message":"Handler ConfigEntriesFlowManager doesn\'t support step reconfigure"}' ) + + +async def test_list_subentries( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test that we can list subentries.""" + assert await async_setup_component(hass, "config", {}) + ws_client = await hass_ws_client(hass) + + 
entry = MockConfigEntry( + domain="test", + state=core_ce.ConfigEntryState.LOADED, + subentries_data=[ + core_ce.ConfigSubentryData( + data={"test": "test"}, + subentry_id="mock_id", + title="Mock title", + unique_id="test", + ) + ], + ) + entry.add_to_hass(hass) + + assert entry.pref_disable_new_entities is False + assert entry.pref_disable_polling is False + + await ws_client.send_json_auto_id( + { + "type": "config_entries/subentries/list", + "entry_id": entry.entry_id, + } + ) + response = await ws_client.receive_json() + + assert response["success"] + assert response["result"] == [ + {"subentry_id": "mock_id", "title": "Mock title", "unique_id": "test"}, + ] + + # Try listing subentries for an unknown entry + await ws_client.send_json_auto_id( + { + "type": "config_entries/subentries/list", + "entry_id": "no_such_entry", + } + ) + response = await ws_client.receive_json() + + assert not response["success"] + assert response["error"] == { + "code": "not_found", + "message": "Config entry not found", + } + + +async def test_delete_subentry( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test that we can delete a subentry.""" + assert await async_setup_component(hass, "config", {}) + ws_client = await hass_ws_client(hass) + + entry = MockConfigEntry( + domain="test", + state=core_ce.ConfigEntryState.LOADED, + subentries_data=[ + core_ce.ConfigSubentryData( + data={"test": "test"}, subentry_id="mock_id", title="Mock title" + ) + ], + ) + entry.add_to_hass(hass) + + assert entry.pref_disable_new_entities is False + assert entry.pref_disable_polling is False + + await ws_client.send_json_auto_id( + { + "type": "config_entries/subentries/delete", + "entry_id": entry.entry_id, + "subentry_id": "mock_id", + } + ) + response = await ws_client.receive_json() + + assert response["success"] + assert response["result"] is None + + await ws_client.send_json_auto_id( + { + "type": "config_entries/subentries/list", + "entry_id": entry.entry_id, + } 
+ ) + response = await ws_client.receive_json() + + assert response["success"] + assert response["result"] == [] + + # Try deleting the subentry again + await ws_client.send_json_auto_id( + { + "type": "config_entries/subentries/delete", + "entry_id": entry.entry_id, + "subentry_id": "mock_id", + } + ) + response = await ws_client.receive_json() + + assert not response["success"] + assert response["error"] == { + "code": "not_found", + "message": "Config subentry not found", + } + + # Try deleting subentry from an unknown entry + await ws_client.send_json_auto_id( + { + "type": "config_entries/subentries/delete", + "entry_id": "no_such_entry", + "subentry_id": "mock_id", + } + ) + response = await ws_client.receive_json() + + assert not response["success"] + assert response["error"] == { + "code": "not_found", + "message": "Config entry not found", + } diff --git a/tests/components/deconz/snapshots/test_diagnostics.ambr b/tests/components/deconz/snapshots/test_diagnostics.ambr index 1ca674a4fbe..20558b4bbbd 100644 --- a/tests/components/deconz/snapshots/test_diagnostics.ambr +++ b/tests/components/deconz/snapshots/test_diagnostics.ambr @@ -21,6 +21,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr b/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr index abedc128756..0e507ca0b28 100644 --- a/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr +++ b/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr @@ -47,6 +47,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '123456', 'version': 1, diff --git a/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr 
b/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr index 3da8c76c2b4..8fe6c7c2293 100644 --- a/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr +++ b/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr @@ -32,6 +32,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr b/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr index d407fe2dc5b..0a46dd7f476 100644 --- a/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr +++ b/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'dsmr_reader', 'unique_id': 'UNIQUE_TEST_ID', 'version': 1, diff --git a/tests/components/ecovacs/snapshots/test_diagnostics.ambr b/tests/components/ecovacs/snapshots/test_diagnostics.ambr index 38c8a9a5ab9..f9540e06038 100644 --- a/tests/components/ecovacs/snapshots/test_diagnostics.ambr +++ b/tests/components/ecovacs/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, @@ -70,6 +72,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, diff --git a/tests/components/energyzero/snapshots/test_config_flow.ambr b/tests/components/energyzero/snapshots/test_config_flow.ambr index 72e504c97c8..88b0af6dc7b 100644 --- a/tests/components/energyzero/snapshots/test_config_flow.ambr +++ b/tests/components/energyzero/snapshots/test_config_flow.ambr @@ -28,10 +28,14 @@ 'pref_disable_new_entities': False, 
'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'EnergyZero', 'unique_id': 'energyzero', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'EnergyZero', 'type': , 'version': 1, diff --git a/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr b/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr index 76835098f27..3cacd3a8518 100644 --- a/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr +++ b/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr @@ -20,6 +20,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, @@ -454,6 +456,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, @@ -928,6 +932,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/esphome/snapshots/test_diagnostics.ambr b/tests/components/esphome/snapshots/test_diagnostics.ambr index 4f7ea679b20..8f1711e829e 100644 --- a/tests/components/esphome/snapshots/test_diagnostics.ambr +++ b/tests/components/esphome/snapshots/test_diagnostics.ambr @@ -20,6 +20,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'ESPHome Device', 'unique_id': '11:22:33:44:55:aa', 'version': 1, diff --git a/tests/components/esphome/test_diagnostics.py b/tests/components/esphome/test_diagnostics.py index 832e7d6572f..0beeae71df3 100644 --- a/tests/components/esphome/test_diagnostics.py +++ b/tests/components/esphome/test_diagnostics.py @@ -79,6 +79,7 @@ async def test_diagnostics_with_bluetooth( "pref_disable_new_entities": False, 
"pref_disable_polling": False, "source": "user", + "subentries": [], "title": "Mock Title", "unique_id": "11:22:33:44:55:aa", "version": 1, diff --git a/tests/components/forecast_solar/snapshots/test_init.ambr b/tests/components/forecast_solar/snapshots/test_init.ambr index 6ae4c2f6198..c0db54c2d4e 100644 --- a/tests/components/forecast_solar/snapshots/test_init.ambr +++ b/tests/components/forecast_solar/snapshots/test_init.ambr @@ -23,6 +23,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Green House', 'unique_id': 'unique', 'version': 2, diff --git a/tests/components/fritz/snapshots/test_diagnostics.ambr b/tests/components/fritz/snapshots/test_diagnostics.ambr index 53f7093a21b..9b5b8c9353a 100644 --- a/tests/components/fritz/snapshots/test_diagnostics.ambr +++ b/tests/components/fritz/snapshots/test_diagnostics.ambr @@ -61,6 +61,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/fronius/snapshots/test_diagnostics.ambr b/tests/components/fronius/snapshots/test_diagnostics.ambr index 010de06e276..b112839835a 100644 --- a/tests/components/fronius/snapshots/test_diagnostics.ambr +++ b/tests/components/fronius/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/fyta/snapshots/test_diagnostics.ambr b/tests/components/fyta/snapshots/test_diagnostics.ambr index eb19797e5b1..f1792cb7535 100644 --- a/tests/components/fyta/snapshots/test_diagnostics.ambr +++ b/tests/components/fyta/snapshots/test_diagnostics.ambr @@ -19,6 +19,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + 
]), 'title': 'fyta_user', 'unique_id': None, 'version': 1, diff --git a/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr b/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr index 6d521b1f2c8..10f23759fae 100644 --- a/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr +++ b/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr @@ -66,10 +66,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'bluetooth', + 'subentries': list([ + ]), 'title': 'Gardena Water Computer', 'unique_id': '00000000-0000-0000-0000-000000000001', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Gardena Water Computer', 'type': , 'version': 1, @@ -223,10 +227,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Gardena Water Computer', 'unique_id': '00000000-0000-0000-0000-000000000001', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Gardena Water Computer', 'type': , 'version': 1, diff --git a/tests/components/gios/snapshots/test_diagnostics.ambr b/tests/components/gios/snapshots/test_diagnostics.ambr index 71e0afdc495..890edc00482 100644 --- a/tests/components/gios/snapshots/test_diagnostics.ambr +++ b/tests/components/gios/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Home', 'unique_id': '123', 'version': 1, diff --git a/tests/components/goodwe/snapshots/test_diagnostics.ambr b/tests/components/goodwe/snapshots/test_diagnostics.ambr index f52e47688e8..40ed22195d5 100644 --- a/tests/components/goodwe/snapshots/test_diagnostics.ambr +++ b/tests/components/goodwe/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff 
--git a/tests/components/google_assistant/snapshots/test_diagnostics.ambr b/tests/components/google_assistant/snapshots/test_diagnostics.ambr index edbbdb1ba28..1ecedbd1173 100644 --- a/tests/components/google_assistant/snapshots/test_diagnostics.ambr +++ b/tests/components/google_assistant/snapshots/test_diagnostics.ambr @@ -15,6 +15,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'import', + 'subentries': list([ + ]), 'title': '1234', 'unique_id': '1234', 'version': 1, diff --git a/tests/components/guardian/test_diagnostics.py b/tests/components/guardian/test_diagnostics.py index faba2103000..4487d0b6ac6 100644 --- a/tests/components/guardian/test_diagnostics.py +++ b/tests/components/guardian/test_diagnostics.py @@ -42,6 +42,7 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, + "subentries": [], }, "data": { "valve_controller": { diff --git a/tests/components/homewizard/snapshots/test_config_flow.ambr b/tests/components/homewizard/snapshots/test_config_flow.ambr index 0a301fc3941..71e70f3a153 100644 --- a/tests/components/homewizard/snapshots/test_config_flow.ambr +++ b/tests/components/homewizard/snapshots/test_config_flow.ambr @@ -30,10 +30,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'zeroconf', + 'subentries': list([ + ]), 'title': 'P1 meter', 'unique_id': 'HWE-P1_5c2fafabcdef', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'P1 meter', 'type': , 'version': 1, @@ -74,10 +78,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'zeroconf', + 'subentries': list([ + ]), 'title': 'P1 meter', 'unique_id': 'HWE-P1_5c2fafabcdef', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'P1 meter', 'type': , 'version': 1, @@ -118,10 +126,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'zeroconf', + 'subentries': list([ + ]), 'title': 'Energy Socket', 'unique_id': 
'HWE-SKT_5c2fafabcdef', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Energy Socket', 'type': , 'version': 1, @@ -158,10 +170,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'P1 meter', 'unique_id': 'HWE-P1_5c2fafabcdef', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'P1 meter', 'type': , 'version': 1, diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index a0bb8302fcc..ce9fc9ac01a 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -190,6 +190,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Husqvarna Automower of Erika Mustermann', 'unique_id': '123', 'version': 1, diff --git a/tests/components/imgw_pib/snapshots/test_diagnostics.ambr b/tests/components/imgw_pib/snapshots/test_diagnostics.ambr index 494980ba4ce..f15fc706d7e 100644 --- a/tests/components/imgw_pib/snapshots/test_diagnostics.ambr +++ b/tests/components/imgw_pib/snapshots/test_diagnostics.ambr @@ -15,6 +15,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'River Name (Station Name)', 'unique_id': '123', 'version': 1, diff --git a/tests/components/iqvia/snapshots/test_diagnostics.ambr b/tests/components/iqvia/snapshots/test_diagnostics.ambr index f2fa656cb0f..41cfedb0e29 100644 --- a/tests/components/iqvia/snapshots/test_diagnostics.ambr +++ b/tests/components/iqvia/snapshots/test_diagnostics.ambr @@ -358,6 +358,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git 
a/tests/components/kostal_plenticore/test_diagnostics.py b/tests/components/kostal_plenticore/test_diagnostics.py index 08f06684d9a..3a99a7f681d 100644 --- a/tests/components/kostal_plenticore/test_diagnostics.py +++ b/tests/components/kostal_plenticore/test_diagnostics.py @@ -57,6 +57,7 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, + "subentries": [], }, "client": { "version": "api_version='0.2.0' hostname='scb' name='PUCK RESTful API' sw_version='01.16.05025'", diff --git a/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr b/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr index 201bbbc971e..640726e2355 100644 --- a/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr +++ b/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr @@ -25,6 +25,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr b/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr index c689d04949a..db82f41eb73 100644 --- a/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr +++ b/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr @@ -73,6 +73,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'test-site-name', 'unique_id': None, 'version': 1, diff --git a/tests/components/madvr/snapshots/test_diagnostics.ambr b/tests/components/madvr/snapshots/test_diagnostics.ambr index 3a281391860..92d0578dba8 100644 --- a/tests/components/madvr/snapshots/test_diagnostics.ambr +++ b/tests/components/madvr/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'envy', 
'unique_id': '00:11:22:33:44:55', 'version': 1, diff --git a/tests/components/melcloud/snapshots/test_diagnostics.ambr b/tests/components/melcloud/snapshots/test_diagnostics.ambr index e6a432de07e..671f5afcc52 100644 --- a/tests/components/melcloud/snapshots/test_diagnostics.ambr +++ b/tests/components/melcloud/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'melcloud', 'unique_id': 'UNIQUE_TEST_ID', 'version': 1, diff --git a/tests/components/modern_forms/snapshots/test_diagnostics.ambr b/tests/components/modern_forms/snapshots/test_diagnostics.ambr index f8897a4a47f..1b4090ca5a4 100644 --- a/tests/components/modern_forms/snapshots/test_diagnostics.ambr +++ b/tests/components/modern_forms/snapshots/test_diagnostics.ambr @@ -16,6 +16,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'AA:BB:CC:DD:EE:FF', 'version': 1, diff --git a/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr b/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr index 5b4b169c0fe..d042dc02ac3 100644 --- a/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr +++ b/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr @@ -28,6 +28,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/netatmo/snapshots/test_diagnostics.ambr b/tests/components/netatmo/snapshots/test_diagnostics.ambr index 463556ec657..4ea7e30bcf9 100644 --- a/tests/components/netatmo/snapshots/test_diagnostics.ambr +++ b/tests/components/netatmo/snapshots/test_diagnostics.ambr @@ -646,6 +646,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 
'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'netatmo', 'version': 1, diff --git a/tests/components/nextdns/snapshots/test_diagnostics.ambr b/tests/components/nextdns/snapshots/test_diagnostics.ambr index 827d6aeb6e5..23f42fee077 100644 --- a/tests/components/nextdns/snapshots/test_diagnostics.ambr +++ b/tests/components/nextdns/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Fake Profile', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/nice_go/snapshots/test_diagnostics.ambr b/tests/components/nice_go/snapshots/test_diagnostics.ambr index f4ba363a421..b33726d2b72 100644 --- a/tests/components/nice_go/snapshots/test_diagnostics.ambr +++ b/tests/components/nice_go/snapshots/test_diagnostics.ambr @@ -60,6 +60,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/notion/test_diagnostics.py b/tests/components/notion/test_diagnostics.py index 890ce2dfc4a..c1d1bd1bb2e 100644 --- a/tests/components/notion/test_diagnostics.py +++ b/tests/components/notion/test_diagnostics.py @@ -37,6 +37,7 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, + "subentries": [], }, "data": { "bridges": [ diff --git a/tests/components/onvif/snapshots/test_diagnostics.ambr b/tests/components/onvif/snapshots/test_diagnostics.ambr index c8a9ff75d62..c3938efcbb6 100644 --- a/tests/components/onvif/snapshots/test_diagnostics.ambr +++ b/tests/components/onvif/snapshots/test_diagnostics.ambr @@ -24,6 +24,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'aa:bb:cc:dd:ee:ff', 'version': 1, diff --git 
a/tests/components/openuv/test_diagnostics.py b/tests/components/openuv/test_diagnostics.py index 61b68b5ad90..03b392b3e7b 100644 --- a/tests/components/openuv/test_diagnostics.py +++ b/tests/components/openuv/test_diagnostics.py @@ -39,6 +39,7 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, + "subentries": [], }, "data": { "protection_window": { diff --git a/tests/components/p1_monitor/snapshots/test_init.ambr b/tests/components/p1_monitor/snapshots/test_init.ambr index d0a676fce1b..83684e153c9 100644 --- a/tests/components/p1_monitor/snapshots/test_init.ambr +++ b/tests/components/p1_monitor/snapshots/test_init.ambr @@ -16,6 +16,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'unique_thingy', 'version': 2, @@ -38,6 +40,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'unique_thingy', 'version': 2, diff --git a/tests/components/pegel_online/snapshots/test_diagnostics.ambr b/tests/components/pegel_online/snapshots/test_diagnostics.ambr index 1e55805f867..d0fdc81acb4 100644 --- a/tests/components/pegel_online/snapshots/test_diagnostics.ambr +++ b/tests/components/pegel_online/snapshots/test_diagnostics.ambr @@ -31,6 +31,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '70272185-xxxx-xxxx-xxxx-43bea330dcae', 'version': 1, diff --git a/tests/components/philips_js/snapshots/test_diagnostics.ambr b/tests/components/philips_js/snapshots/test_diagnostics.ambr index 4f7a6176634..53db95f0534 100644 --- a/tests/components/philips_js/snapshots/test_diagnostics.ambr +++ b/tests/components/philips_js/snapshots/test_diagnostics.ambr @@ -94,6 +94,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 
'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/philips_js/test_config_flow.py b/tests/components/philips_js/test_config_flow.py index 80d05961813..4b8048a8ebe 100644 --- a/tests/components/philips_js/test_config_flow.py +++ b/tests/components/philips_js/test_config_flow.py @@ -155,6 +155,7 @@ async def test_pairing(hass: HomeAssistant, mock_tv_pairable, mock_setup_entry) "version": 1, "options": {}, "minor_version": 1, + "subentries": (), } await hass.async_block_till_done() diff --git a/tests/components/pi_hole/snapshots/test_diagnostics.ambr b/tests/components/pi_hole/snapshots/test_diagnostics.ambr index 3094fcef24b..2d6f6687d04 100644 --- a/tests/components/pi_hole/snapshots/test_diagnostics.ambr +++ b/tests/components/pi_hole/snapshots/test_diagnostics.ambr @@ -33,6 +33,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/proximity/snapshots/test_diagnostics.ambr b/tests/components/proximity/snapshots/test_diagnostics.ambr index 3d9673ffd90..42ec74710f9 100644 --- a/tests/components/proximity/snapshots/test_diagnostics.ambr +++ b/tests/components/proximity/snapshots/test_diagnostics.ambr @@ -102,6 +102,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'home', 'unique_id': 'proximity_home', 'version': 1, diff --git a/tests/components/ps4/test_init.py b/tests/components/ps4/test_init.py index d14f367b2bd..24d45fee5b9 100644 --- a/tests/components/ps4/test_init.py +++ b/tests/components/ps4/test_init.py @@ -52,6 +52,7 @@ MOCK_FLOW_RESULT = { "title": "test_ps4", "data": MOCK_DATA, "options": {}, + "subentries": (), } MOCK_ENTRY_ID = "SomeID" diff --git a/tests/components/purpleair/test_diagnostics.py 
b/tests/components/purpleair/test_diagnostics.py index ae4b28567be..6271a63d652 100644 --- a/tests/components/purpleair/test_diagnostics.py +++ b/tests/components/purpleair/test_diagnostics.py @@ -38,6 +38,7 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, + "subentries": [], }, "data": { "fields": [ diff --git a/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr b/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr index e131bf3d952..abf8e380916 100644 --- a/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr +++ b/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, @@ -84,6 +86,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/rainmachine/snapshots/test_diagnostics.ambr b/tests/components/rainmachine/snapshots/test_diagnostics.ambr index acd5fd165b4..681805996f1 100644 --- a/tests/components/rainmachine/snapshots/test_diagnostics.ambr +++ b/tests/components/rainmachine/snapshots/test_diagnostics.ambr @@ -1144,6 +1144,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 2, @@ -2275,6 +2277,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/recollect_waste/test_diagnostics.py b/tests/components/recollect_waste/test_diagnostics.py index 24c690bcb37..a57e289ec04 100644 --- a/tests/components/recollect_waste/test_diagnostics.py +++ 
b/tests/components/recollect_waste/test_diagnostics.py @@ -34,6 +34,7 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, + "subentries": [], }, "data": [ { diff --git a/tests/components/ridwell/snapshots/test_diagnostics.ambr b/tests/components/ridwell/snapshots/test_diagnostics.ambr index b03d87c7a89..4b4dda7227d 100644 --- a/tests/components/ridwell/snapshots/test_diagnostics.ambr +++ b/tests/components/ridwell/snapshots/test_diagnostics.ambr @@ -44,6 +44,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/samsungtv/test_diagnostics.py b/tests/components/samsungtv/test_diagnostics.py index 0319d5dd8dd..e8e0b699a7e 100644 --- a/tests/components/samsungtv/test_diagnostics.py +++ b/tests/components/samsungtv/test_diagnostics.py @@ -51,6 +51,7 @@ async def test_entry_diagnostics( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "user", + "subentries": [], "title": "Mock Title", "unique_id": "any", "version": 2, @@ -91,6 +92,7 @@ async def test_entry_diagnostics_encrypted( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "user", + "subentries": [], "title": "Mock Title", "unique_id": "any", "version": 2, @@ -130,6 +132,7 @@ async def test_entry_diagnostics_encrypte_offline( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "user", + "subentries": [], "title": "Mock Title", "unique_id": "any", "version": 2, diff --git a/tests/components/screenlogic/snapshots/test_diagnostics.ambr b/tests/components/screenlogic/snapshots/test_diagnostics.ambr index 237d3eab257..c7db7a33959 100644 --- a/tests/components/screenlogic/snapshots/test_diagnostics.ambr +++ b/tests/components/screenlogic/snapshots/test_diagnostics.ambr @@ -18,6 +18,8 @@ 'pref_disable_new_entities': False, 
'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Pentair: DD-EE-FF', 'unique_id': 'aa:bb:cc:dd:ee:ff', 'version': 1, diff --git a/tests/components/simplisafe/test_diagnostics.py b/tests/components/simplisafe/test_diagnostics.py index d5479f00b06..13c1e28aa36 100644 --- a/tests/components/simplisafe/test_diagnostics.py +++ b/tests/components/simplisafe/test_diagnostics.py @@ -32,6 +32,7 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, + "subentries": [], }, "subscription_data": { "12345": { diff --git a/tests/components/solarlog/snapshots/test_diagnostics.ambr b/tests/components/solarlog/snapshots/test_diagnostics.ambr index e0f1bc2623c..6aef72ebbd5 100644 --- a/tests/components/solarlog/snapshots/test_diagnostics.ambr +++ b/tests/components/solarlog/snapshots/test_diagnostics.ambr @@ -18,6 +18,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'solarlog', 'unique_id': None, 'version': 1, diff --git a/tests/components/subaru/test_config_flow.py b/tests/components/subaru/test_config_flow.py index 6abc544c92a..0b45546902b 100644 --- a/tests/components/subaru/test_config_flow.py +++ b/tests/components/subaru/test_config_flow.py @@ -136,6 +136,7 @@ async def test_user_form_pin_not_required( "data": deepcopy(TEST_CONFIG), "options": {}, "minor_version": 1, + "subentries": (), } expected["data"][CONF_PIN] = None @@ -341,6 +342,7 @@ async def test_pin_form_success(hass: HomeAssistant, pin_form) -> None: "data": TEST_CONFIG, "options": {}, "minor_version": 1, + "subentries": (), } result["data"][CONF_DEVICE_ID] = TEST_DEVICE_ID assert result == expected diff --git a/tests/components/switcher_kis/test_diagnostics.py b/tests/components/switcher_kis/test_diagnostics.py index 53572085f9b..f59958420c4 100644 --- a/tests/components/switcher_kis/test_diagnostics.py +++ 
b/tests/components/switcher_kis/test_diagnostics.py @@ -69,5 +69,6 @@ async def test_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, + "subentries": [], }, } diff --git a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr index 75d942fc601..afa508cc004 100644 --- a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr +++ b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr @@ -56,6 +56,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'System Monitor', 'unique_id': None, 'version': 1, @@ -111,6 +113,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'System Monitor', 'unique_id': None, 'version': 1, diff --git a/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr b/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr index 3180c7c0b1d..b5b33d7c246 100644 --- a/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr +++ b/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr @@ -37,6 +37,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/tractive/snapshots/test_diagnostics.ambr b/tests/components/tractive/snapshots/test_diagnostics.ambr index 11427a84801..3613f7e5997 100644 --- a/tests/components/tractive/snapshots/test_diagnostics.ambr +++ b/tests/components/tractive/snapshots/test_diagnostics.ambr @@ -17,6 +17,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': 'very_unique_string', 'version': 1, diff --git a/tests/components/tuya/snapshots/test_config_flow.ambr 
b/tests/components/tuya/snapshots/test_config_flow.ambr index a5a68a12a22..90d83d69814 100644 --- a/tests/components/tuya/snapshots/test_config_flow.ambr +++ b/tests/components/tuya/snapshots/test_config_flow.ambr @@ -24,6 +24,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '12345', 'unique_id': '12345', 'version': 1, @@ -54,6 +56,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Old Tuya configuration entry', 'unique_id': '12345', 'version': 1, @@ -107,10 +111,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'mocked_username', 'unique_id': None, 'version': 1, }), + 'subentries': tuple( + ), 'title': 'mocked_username', 'type': , 'version': 1, diff --git a/tests/components/twinkly/snapshots/test_diagnostics.ambr b/tests/components/twinkly/snapshots/test_diagnostics.ambr index 28ec98cf572..e52f76634fd 100644 --- a/tests/components/twinkly/snapshots/test_diagnostics.ambr +++ b/tests/components/twinkly/snapshots/test_diagnostics.ambr @@ -37,6 +37,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Twinkly', 'unique_id': '4c8fccf5-e08a-4173-92d5-49bf479252a2', 'version': 1, diff --git a/tests/components/unifi/snapshots/test_diagnostics.ambr b/tests/components/unifi/snapshots/test_diagnostics.ambr index 4ba90a00113..aa7337be0ba 100644 --- a/tests/components/unifi/snapshots/test_diagnostics.ambr +++ b/tests/components/unifi/snapshots/test_diagnostics.ambr @@ -42,6 +42,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': '1', 'version': 1, diff --git a/tests/components/uptime/snapshots/test_config_flow.ambr 
b/tests/components/uptime/snapshots/test_config_flow.ambr index 38312667375..93b1da60998 100644 --- a/tests/components/uptime/snapshots/test_config_flow.ambr +++ b/tests/components/uptime/snapshots/test_config_flow.ambr @@ -27,10 +27,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Uptime', 'unique_id': None, 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Uptime', 'type': , 'version': 1, diff --git a/tests/components/utility_meter/snapshots/test_diagnostics.ambr b/tests/components/utility_meter/snapshots/test_diagnostics.ambr index 6cdf121d7e3..ef235bba99d 100644 --- a/tests/components/utility_meter/snapshots/test_diagnostics.ambr +++ b/tests/components/utility_meter/snapshots/test_diagnostics.ambr @@ -25,6 +25,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Energy Bill', 'unique_id': None, 'version': 2, diff --git a/tests/components/v2c/snapshots/test_diagnostics.ambr b/tests/components/v2c/snapshots/test_diagnostics.ambr index 96567b80c54..780a00acd64 100644 --- a/tests/components/v2c/snapshots/test_diagnostics.ambr +++ b/tests/components/v2c/snapshots/test_diagnostics.ambr @@ -16,6 +16,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': 'ABC123', 'version': 1, diff --git a/tests/components/vicare/snapshots/test_diagnostics.ambr b/tests/components/vicare/snapshots/test_diagnostics.ambr index ae9b05389c7..0b1dcef5a29 100644 --- a/tests/components/vicare/snapshots/test_diagnostics.ambr +++ b/tests/components/vicare/snapshots/test_diagnostics.ambr @@ -4731,6 +4731,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': 'ViCare', 'version': 1, diff --git 
a/tests/components/vodafone_station/snapshots/test_diagnostics.ambr b/tests/components/vodafone_station/snapshots/test_diagnostics.ambr index c258b14dc2d..dd268f4ed1a 100644 --- a/tests/components/vodafone_station/snapshots/test_diagnostics.ambr +++ b/tests/components/vodafone_station/snapshots/test_diagnostics.ambr @@ -35,6 +35,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/watttime/snapshots/test_diagnostics.ambr b/tests/components/watttime/snapshots/test_diagnostics.ambr index 0c137acc36b..3cc5e1d6f66 100644 --- a/tests/components/watttime/snapshots/test_diagnostics.ambr +++ b/tests/components/watttime/snapshots/test_diagnostics.ambr @@ -27,6 +27,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/webmin/snapshots/test_diagnostics.ambr b/tests/components/webmin/snapshots/test_diagnostics.ambr index 8299b0eafba..c64fa212a98 100644 --- a/tests/components/webmin/snapshots/test_diagnostics.ambr +++ b/tests/components/webmin/snapshots/test_diagnostics.ambr @@ -253,6 +253,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, diff --git a/tests/components/webostv/test_diagnostics.py b/tests/components/webostv/test_diagnostics.py index 3d7cb00e021..7f54e940966 100644 --- a/tests/components/webostv/test_diagnostics.py +++ b/tests/components/webostv/test_diagnostics.py @@ -61,5 +61,6 @@ async def test_diagnostics( "created_at": entry.created_at.isoformat(), "modified_at": entry.modified_at.isoformat(), "discovery_keys": {}, + "subentries": [], }, } diff --git a/tests/components/whirlpool/snapshots/test_diagnostics.ambr 
b/tests/components/whirlpool/snapshots/test_diagnostics.ambr index c60ce17b952..ee8abe04bf1 100644 --- a/tests/components/whirlpool/snapshots/test_diagnostics.ambr +++ b/tests/components/whirlpool/snapshots/test_diagnostics.ambr @@ -38,6 +38,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/whois/snapshots/test_config_flow.ambr b/tests/components/whois/snapshots/test_config_flow.ambr index 937502d4d6c..0d99b0596e3 100644 --- a/tests/components/whois/snapshots/test_config_flow.ambr +++ b/tests/components/whois/snapshots/test_config_flow.ambr @@ -30,10 +30,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Example.com', 'type': , 'version': 1, @@ -70,10 +74,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Example.com', 'type': , 'version': 1, @@ -110,10 +118,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Example.com', 'type': , 'version': 1, @@ -150,10 +162,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Example.com', 'type': , 'version': 1, @@ -190,10 +206,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Example.com', 'unique_id': 
'example.com', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Example.com', 'type': , 'version': 1, diff --git a/tests/components/workday/snapshots/test_diagnostics.ambr b/tests/components/workday/snapshots/test_diagnostics.ambr index f41b86b7f6d..e7331b911a8 100644 --- a/tests/components/workday/snapshots/test_diagnostics.ambr +++ b/tests/components/workday/snapshots/test_diagnostics.ambr @@ -40,6 +40,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/wyoming/snapshots/test_config_flow.ambr b/tests/components/wyoming/snapshots/test_config_flow.ambr index bdead0f2028..d288c531407 100644 --- a/tests/components/wyoming/snapshots/test_config_flow.ambr +++ b/tests/components/wyoming/snapshots/test_config_flow.ambr @@ -36,10 +36,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'hassio', + 'subentries': list([ + ]), 'title': 'Piper', 'unique_id': '1234', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Piper', 'type': , 'version': 1, @@ -82,10 +86,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'hassio', + 'subentries': list([ + ]), 'title': 'Piper', 'unique_id': '1234', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Piper', 'type': , 'version': 1, @@ -127,10 +135,14 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'zeroconf', + 'subentries': list([ + ]), 'title': 'Test Satellite', 'unique_id': 'test_zeroconf_name._wyoming._tcp.local._Test Satellite', 'version': 1, }), + 'subentries': tuple( + ), 'title': 'Test Satellite', 'type': , 'version': 1, diff --git a/tests/components/zha/snapshots/test_diagnostics.ambr b/tests/components/zha/snapshots/test_diagnostics.ambr index f46a06e84b8..08807f65d5d 100644 --- a/tests/components/zha/snapshots/test_diagnostics.ambr +++ 
b/tests/components/zha/snapshots/test_diagnostics.ambr @@ -113,6 +113,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 4, diff --git a/tests/snapshots/test_config_entries.ambr b/tests/snapshots/test_config_entries.ambr index 51e56f4874e..08b532677f4 100644 --- a/tests/snapshots/test_config_entries.ambr +++ b/tests/snapshots/test_config_entries.ambr @@ -16,6 +16,8 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', + 'subentries': list([ + ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index aba85a35349..1ad152e8e42 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -4,6 +4,7 @@ from __future__ import annotations import asyncio from collections.abc import Generator +from contextlib import AbstractContextManager, nullcontext as does_not_raise from datetime import timedelta import logging import re @@ -905,7 +906,7 @@ async def test_entries_excludes_ignore_and_disabled( async def test_saving_and_loading( - hass: HomeAssistant, freezer: FrozenDateTimeFactory + hass: HomeAssistant, freezer: FrozenDateTimeFactory, hass_storage: dict[str, Any] ) -> None: """Test that we're saving and loading correctly.""" mock_integration( @@ -922,7 +923,17 @@ async def test_saving_and_loading( async def async_step_user(self, user_input=None): """Test user step.""" await self.async_set_unique_id("unique") - return self.async_create_entry(title="Test Title", data={"token": "abcd"}) + subentries = [ + config_entries.ConfigSubentryData( + data={"foo": "bar"}, title="subentry 1" + ), + config_entries.ConfigSubentryData( + data={"sun": "moon"}, title="subentry 2", unique_id="very_unique" + ), + ] + return self.async_create_entry( + title="Test Title", data={"token": "abcd"}, subentries=subentries + ) with 
mock_config_flow("test", TestFlow): await hass.config_entries.flow.async_init( @@ -971,6 +982,98 @@ async def test_saving_and_loading( # To execute the save await hass.async_block_till_done() + stored_data = hass_storage["core.config_entries"] + assert stored_data == { + "data": { + "entries": [ + { + "created_at": ANY, + "data": { + "token": "abcd", + }, + "disabled_by": None, + "discovery_keys": {}, + "domain": "test", + "entry_id": ANY, + "minor_version": 1, + "modified_at": ANY, + "options": {}, + "pref_disable_new_entities": True, + "pref_disable_polling": True, + "source": "user", + "subentries": [ + { + "data": {"foo": "bar"}, + "subentry_id": ANY, + "title": "subentry 1", + "unique_id": None, + }, + { + "data": {"sun": "moon"}, + "subentry_id": ANY, + "title": "subentry 2", + "unique_id": "very_unique", + }, + ], + "title": "Test Title", + "unique_id": "unique", + "version": 5, + }, + { + "created_at": ANY, + "data": { + "username": "bla", + }, + "disabled_by": None, + "discovery_keys": { + "test": [ + {"domain": "test", "key": "blah", "version": 1}, + ], + }, + "domain": "test", + "entry_id": ANY, + "minor_version": 1, + "modified_at": ANY, + "options": {}, + "pref_disable_new_entities": False, + "pref_disable_polling": False, + "source": "user", + "subentries": [], + "title": "Test 2 Title", + "unique_id": None, + "version": 3, + }, + { + "created_at": ANY, + "data": { + "username": "bla", + }, + "disabled_by": None, + "discovery_keys": { + "test": [ + {"domain": "test", "key": ["a", "b"], "version": 1}, + ], + }, + "domain": "test", + "entry_id": ANY, + "minor_version": 1, + "modified_at": ANY, + "options": {}, + "pref_disable_new_entities": False, + "pref_disable_polling": False, + "source": "user", + "subentries": [], + "title": "Test 2 Title", + "unique_id": None, + "version": 3, + }, + ], + }, + "key": "core.config_entries", + "minor_version": 5, + "version": 1, + } + # Now load written data in new config manager manager = 
config_entries.ConfigEntries(hass, {}) await manager.async_initialize() @@ -983,6 +1086,25 @@ async def test_saving_and_loading( ): assert orig.as_dict() == loaded.as_dict() + hass.config_entries.async_update_entry( + entry_1, + pref_disable_polling=False, + pref_disable_new_entities=False, + ) + + # To trigger the call_later + freezer.tick(1.0) + async_fire_time_changed(hass) + # To execute the save + await hass.async_block_till_done() + + # Assert no data is lost when storing again + expected_stored_data = stored_data + expected_stored_data["data"]["entries"][0]["modified_at"] = ANY + expected_stored_data["data"]["entries"][0]["pref_disable_new_entities"] = False + expected_stored_data["data"]["entries"][0]["pref_disable_polling"] = False + assert hass_storage["core.config_entries"] == expected_stored_data | {} + @freeze_time("2024-02-14 12:00:00") async def test_as_dict(snapshot: SnapshotAssertion) -> None: @@ -1416,6 +1538,42 @@ async def test_update_entry_options_and_trigger_listener( assert len(update_listener_calls) == 1 +async def test_update_subentry_and_trigger_listener( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test that we can update subentry and trigger listener.""" + entry = MockConfigEntry(domain="test", options={"first": True}) + entry.add_to_manager(manager) + update_listener_calls = [] + + subentry = config_entries.ConfigSubentry( + data={"test": "test"}, unique_id="test", title="Mock title" + ) + + async def update_listener( + hass: HomeAssistant, entry: config_entries.ConfigEntry + ) -> None: + """Test function.""" + assert entry.subentries == expected_subentries + update_listener_calls.append(None) + + entry.add_update_listener(update_listener) + + expected_subentries = {subentry.subentry_id: subentry} + assert manager.async_add_subentry(entry, subentry) is True + + await hass.async_block_till_done(wait_background_tasks=True) + assert entry.subentries == expected_subentries + assert len(update_listener_calls) 
== 1 + + expected_subentries = {} + assert manager.async_remove_subentry(entry, subentry.subentry_id) is True + + await hass.async_block_till_done(wait_background_tasks=True) + assert entry.subentries == expected_subentries + assert len(update_listener_calls) == 2 + + async def test_setup_raise_not_ready( hass: HomeAssistant, manager: config_entries.ConfigEntries, @@ -1742,17 +1900,453 @@ async def test_entry_options_unknown_config_entry( mock_integration(hass, MockModule("test")) mock_platform(hass, "test.config_flow", None) - class TestFlow: + with pytest.raises(config_entries.UnknownEntry): + await manager.options.async_create_flow( + "blah", context={"source": "test"}, data=None + ) + + +async def test_create_entry_subentries( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test a config entry being created with subentries.""" + + subentrydata = config_entries.ConfigSubentryData( + data={"test": "test"}, + title="Mock title", + unique_id="test", + ) + + async def mock_async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Mock setup.""" + hass.async_create_task( + hass.config_entries.flow.async_init( + "comp", + context={"source": config_entries.SOURCE_IMPORT}, + data={"data": "data", "subentry": subentrydata}, + ) + ) + return True + + async_setup_entry = AsyncMock(return_value=True) + mock_integration( + hass, + MockModule( + "comp", async_setup=mock_async_setup, async_setup_entry=async_setup_entry + ), + ) + mock_platform(hass, "comp.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + VERSION = 1 + + async def async_step_import(self, user_input): + """Test import step creating entry, with subentry.""" + return self.async_create_entry( + title="title", + data={"example": user_input["data"]}, + subentries=[user_input["subentry"]], + ) + + with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): + assert await async_setup_component(hass, "comp", {}) + + await 
hass.async_block_till_done() + + assert len(async_setup_entry.mock_calls) == 1 + + entries = hass.config_entries.async_entries("comp") + assert len(entries) == 1 + assert entries[0].supported_subentries == () + assert entries[0].data == {"example": "data"} + assert len(entries[0].subentries) == 1 + subentry_id = list(entries[0].subentries)[0] + subentry = config_entries.ConfigSubentry( + data=subentrydata["data"], + subentry_id=subentry_id, + title=subentrydata["title"], + unique_id="test", + ) + assert entries[0].subentries == {subentry_id: subentry} + + +async def test_entry_subentry( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test that we can add a subentry to an entry.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): """Test flow.""" @staticmethod @callback - def async_get_options_flow(config_entry): - """Test options flow.""" + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with mock_config_flow("test", TestFlow): + flow = await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context={"source": "test"}, data=None + ) + + flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry + + await manager.subentries.async_finish_flow( + flow, + { + "data": {"second": True}, + "title": "Mock title", + "type": data_entry_flow.FlowResultType.CREATE_ENTRY, + "unique_id": "test", + }, + ) + + assert entry.data == {"first": True} + assert entry.options == {} + subentry_id = list(entry.subentries)[0] + 
assert entry.subentries == { + subentry_id: config_entries.ConfigSubentry( + data={"second": True}, + subentry_id=subentry_id, + title="Mock title", + unique_id="test", + ) + } + assert entry.supported_subentries == ("test",) + + +async def test_entry_subentry_non_string( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test adding an invalid subentry to an entry.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with mock_config_flow("test", TestFlow): + flow = await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context={"source": "test"}, data=None + ) + + flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry + + with pytest.raises(HomeAssistantError): + await manager.subentries.async_finish_flow( + flow, + { + "data": {"second": True}, + "title": "Mock title", + "type": data_entry_flow.FlowResultType.CREATE_ENTRY, + "unique_id": 123, + }, + ) + + +@pytest.mark.parametrize("context", [None, {}, {"bla": "bleh"}]) +async def test_entry_subentry_no_context( + hass: HomeAssistant, manager: config_entries.ConfigEntries, context: dict | None +) -> None: + """Test starting a subentry flow without "source" in context.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + 
+ class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with mock_config_flow("test", TestFlow), pytest.raises(KeyError): + await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context=context, data=None + ) + + +@pytest.mark.parametrize( + ("unique_id", "expected_result"), + [(None, does_not_raise()), ("test", pytest.raises(HomeAssistantError))], +) +async def test_entry_subentry_duplicate( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + unique_id: str | None, + expected_result: AbstractContextManager, +) -> None: + """Test adding a duplicated subentry to an entry.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry( + domain="test", + data={"first": True}, + subentries_data=[ + config_entries.ConfigSubentryData( + data={}, + subentry_id="blabla", + title="Mock title", + unique_id=unique_id, + ) + ], + ) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with mock_config_flow("test", TestFlow): + flow = await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context={"source": "test"}, data=None + ) + + 
flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry + + with expected_result: + await manager.subentries.async_finish_flow( + flow, + { + "data": {"second": True}, + "title": "Mock title", + "type": data_entry_flow.FlowResultType.CREATE_ENTRY, + "unique_id": unique_id, + }, + ) + + +async def test_entry_subentry_abort( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test that we can abort subentry flow.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with mock_config_flow("test", TestFlow): + flow = await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context={"source": "test"}, data=None + ) + + flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry + + assert await manager.subentries.async_finish_flow( + flow, {"type": data_entry_flow.FlowResultType.ABORT, "reason": "test"} + ) + + +async def test_entry_subentry_unknown_config_entry( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test attempting to start a subentry flow for an unknown config entry.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) with pytest.raises(config_entries.UnknownEntry): - await manager.options.async_create_flow( - "blah", context={"source": "test"}, data=None + await manager.subentries.async_create_flow( + ("blah", 
"blah"), context={"source": "test"}, data=None + ) + + +async def test_entry_subentry_deleted_config_entry( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test attempting to finish a subentry flow for a deleted config entry.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_subentry_flow(config_entry, subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with mock_config_flow("test", TestFlow): + flow = await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context={"source": "test"}, data=None + ) + + flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry + + await hass.config_entries.async_remove(entry.entry_id) + + with pytest.raises(config_entries.UnknownEntry): + await manager.subentries.async_finish_flow( + flow, + { + "data": {"second": True}, + "title": "Mock title", + "type": data_entry_flow.FlowResultType.CREATE_ENTRY, + "unique_id": "test", + }, + ) + + +async def test_entry_subentry_unsupported( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test attempting to start a subentry flow for a config entry without support.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_subentry_flow(config_entry, 
subentry_type: str): + """Test subentry flow.""" + + class SubentryFlowHandler(data_entry_flow.FlowHandler): + """Test subentry flow handler.""" + + return SubentryFlowHandler() + + @classmethod + @callback + def async_supported_subentries( + cls, config_entry: ConfigEntry + ) -> tuple[str, ...]: + return ("test",) + + with ( + mock_config_flow("test", TestFlow), + pytest.raises(data_entry_flow.UnknownHandler), + ): + await manager.subentries.async_create_flow( + ( + entry.entry_id, + "unknown", + ), + context={"source": "test"}, + data=None, + ) + + +async def test_entry_subentry_unsupported_subentry_type( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test attempting to start a subentry flow for a config entry without support.""" + mock_integration(hass, MockModule("test")) + mock_platform(hass, "test.config_flow", None) + entry = MockConfigEntry(domain="test", data={"first": True}) + entry.add_to_manager(manager) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + with ( + mock_config_flow("test", TestFlow), + pytest.raises(data_entry_flow.UnknownHandler), + ): + await manager.subentries.async_create_flow( + (entry.entry_id, "test"), context={"source": "test"}, data=None ) @@ -3911,21 +4505,20 @@ async def test_updating_entry_with_and_without_changes( assert manager.async_update_entry(entry) is False - for change in ( - {"data": {"second": True, "third": 456}}, - {"data": {"second": True}}, - {"minor_version": 2}, - {"options": {"hello": True}}, - {"pref_disable_new_entities": True}, - {"pref_disable_polling": True}, - {"title": "sometitle"}, - {"unique_id": "abcd1234"}, - {"version": 2}, + for change, expected_value in ( + ({"data": {"second": True, "third": 456}}, {"second": True, "third": 456}), + ({"data": {"second": True}}, {"second": True}), + ({"minor_version": 2}, 2), + ({"options": {"hello": True}}, {"hello": True}), + ({"pref_disable_new_entities": True}, True), + ({"pref_disable_polling": True}, True), 
+ ({"title": "sometitle"}, "sometitle"), + ({"unique_id": "abcd1234"}, "abcd1234"), + ({"version": 2}, 2), ): assert manager.async_update_entry(entry, **change) is True key = next(iter(change)) - value = next(iter(change.values())) - assert getattr(entry, key) == value + assert getattr(entry, key) == expected_value assert manager.async_update_entry(entry, **change) is False assert manager.async_entry_for_domain_unique_id("test", "abc123") is None @@ -5459,6 +6052,7 @@ async def test_unhashable_unique_id_fails( minor_version=1, options={}, source="test", + subentries_data=(), title="title", unique_id=unique_id, version=1, @@ -5494,6 +6088,7 @@ async def test_unhashable_unique_id_fails_on_update( minor_version=1, options={}, source="test", + subentries_data=(), title="title", unique_id="123", version=1, @@ -5524,6 +6119,7 @@ async def test_string_unique_id_no_warning( minor_version=1, options={}, source="test", + subentries_data=(), title="title", unique_id="123", version=1, @@ -5566,6 +6162,7 @@ async def test_hashable_unique_id( minor_version=1, options={}, source="test", + subentries_data=(), title="title", unique_id=unique_id, version=1, @@ -5600,6 +6197,7 @@ async def test_no_unique_id_no_warning( minor_version=1, options={}, source="test", + subentries_data=(), title="title", unique_id=None, version=1, @@ -6524,6 +7122,7 @@ async def test_migration_from_1_2( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "import", + "subentries": {}, "title": "Sun", "unique_id": None, "version": 1, From a3584919706cd5497d9c8ac9331123893a616001 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:16:54 +0100 Subject: [PATCH 143/677] Migrate wiz light tests to use Kelvin (#133032) --- tests/components/wiz/test_light.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/components/wiz/test_light.py b/tests/components/wiz/test_light.py index 1fb87b30a5f..5c74d407238 
100644 --- a/tests/components/wiz/test_light.py +++ b/tests/components/wiz/test_light.py @@ -4,7 +4,7 @@ from pywizlight import PilotBuilder from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -91,7 +91,7 @@ async def test_rgbww_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 153, ATTR_BRIGHTNESS: 128}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6535, ATTR_BRIGHTNESS: 128}, blocking=True, ) pilot: PilotBuilder = bulb.turn_on.mock_calls[0][1][0] @@ -99,7 +99,7 @@ async def test_rgbww_light(hass: HomeAssistant) -> None: await async_push_update(hass, bulb, {"mac": FAKE_MAC, **pilot.pilot_params}) state = hass.states.get(entity_id) assert state.state == STATE_ON - assert state.attributes[ATTR_COLOR_TEMP] == 153 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 6535 bulb.turn_on.reset_mock() await hass.services.async_call( @@ -148,7 +148,7 @@ async def test_rgbw_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 153, ATTR_BRIGHTNESS: 128}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6535, ATTR_BRIGHTNESS: 128}, blocking=True, ) pilot: PilotBuilder = bulb.turn_on.mock_calls[0][1][0] @@ -162,7 +162,7 @@ async def test_turnable_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 153, ATTR_BRIGHTNESS: 128}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6535, ATTR_BRIGHTNESS: 128}, blocking=True, ) pilot: PilotBuilder = bulb.turn_on.mock_calls[0][1][0] @@ -171,7 +171,7 @@ async def test_turnable_light(hass: HomeAssistant) -> None: await async_push_update(hass, bulb, {"mac": FAKE_MAC, **pilot.pilot_params}) state = hass.states.get(entity_id) assert 
state.state == STATE_ON - assert state.attributes[ATTR_COLOR_TEMP] == 153 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 6535 async def test_old_firmware_dimmable_light(hass: HomeAssistant) -> None: From 798f3a34f3151d5c2e99bb4f8b8b39a98ab9c566 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:17:45 +0100 Subject: [PATCH 144/677] Migrate abode light tests to use Kelvin (#133001) --- tests/components/abode/test_light.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/components/abode/test_light.py b/tests/components/abode/test_light.py index d556a20fa90..4be94a09ee8 100644 --- a/tests/components/abode/test_light.py +++ b/tests/components/abode/test_light.py @@ -6,7 +6,7 @@ from homeassistant.components.abode import ATTR_DEVICE_ID from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_SUPPORTED_COLOR_MODES, DOMAIN as LIGHT_DOMAIN, @@ -46,7 +46,7 @@ async def test_attributes(hass: HomeAssistant) -> None: assert state.state == STATE_ON assert state.attributes.get(ATTR_BRIGHTNESS) == 204 assert state.attributes.get(ATTR_RGB_COLOR) == (0, 64, 255) - assert state.attributes.get(ATTR_COLOR_TEMP) is None + assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) is None assert state.attributes.get(ATTR_DEVICE_ID) == "ZB:db5b1a" assert not state.attributes.get("battery_low") assert not state.attributes.get("no_response") From c164507952e3400d0aecf020921173955a0b2c62 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:18:19 +0100 Subject: [PATCH 145/677] Add new integration slide_local (#132632) Co-authored-by: Joost Lekkerkerker --- CODEOWNERS | 2 + homeassistant/brands/slide.json | 5 + .../components/slide_local/__init__.py | 33 ++ .../components/slide_local/config_flow.py | 183 +++++++++ homeassistant/components/slide_local/const.py | 13 + 
.../components/slide_local/coordinator.py | 112 ++++++ homeassistant/components/slide_local/cover.py | 113 ++++++ .../components/slide_local/entity.py | 29 ++ .../components/slide_local/manifest.json | 17 + .../components/slide_local/quality_scale.yaml | 66 ++++ .../components/slide_local/strings.json | 35 ++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 17 +- homeassistant/generated/zeroconf.py | 4 + requirements_all.txt | 1 + requirements_test_all.txt | 4 + tests/components/slide_local/__init__.py | 21 + tests/components/slide_local/conftest.py | 63 +++ tests/components/slide_local/const.py | 8 + .../slide_local/fixtures/slide_1.json | 11 + .../slide_local/test_config_flow.py | 373 ++++++++++++++++++ 21 files changed, 1108 insertions(+), 3 deletions(-) create mode 100644 homeassistant/brands/slide.json create mode 100644 homeassistant/components/slide_local/__init__.py create mode 100644 homeassistant/components/slide_local/config_flow.py create mode 100644 homeassistant/components/slide_local/const.py create mode 100644 homeassistant/components/slide_local/coordinator.py create mode 100644 homeassistant/components/slide_local/cover.py create mode 100644 homeassistant/components/slide_local/entity.py create mode 100644 homeassistant/components/slide_local/manifest.json create mode 100644 homeassistant/components/slide_local/quality_scale.yaml create mode 100644 homeassistant/components/slide_local/strings.json create mode 100644 tests/components/slide_local/__init__.py create mode 100644 tests/components/slide_local/conftest.py create mode 100644 tests/components/slide_local/const.py create mode 100644 tests/components/slide_local/fixtures/slide_1.json create mode 100644 tests/components/slide_local/test_config_flow.py diff --git a/CODEOWNERS b/CODEOWNERS index 03b0e7b893b..6c11f57da83 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1359,6 +1359,8 @@ build.json @home-assistant/supervisor /homeassistant/components/sleepiq/ 
@mfugate1 @kbickar /tests/components/sleepiq/ @mfugate1 @kbickar /homeassistant/components/slide/ @ualex73 +/homeassistant/components/slide_local/ @dontinelli +/tests/components/slide_local/ @dontinelli /homeassistant/components/slimproto/ @marcelveldt /tests/components/slimproto/ @marcelveldt /homeassistant/components/sma/ @kellerza @rklomp diff --git a/homeassistant/brands/slide.json b/homeassistant/brands/slide.json new file mode 100644 index 00000000000..808a54affc3 --- /dev/null +++ b/homeassistant/brands/slide.json @@ -0,0 +1,5 @@ +{ + "domain": "slide", + "name": "Slide", + "integrations": ["slide", "slide_local"] +} diff --git a/homeassistant/components/slide_local/__init__.py b/homeassistant/components/slide_local/__init__.py new file mode 100644 index 00000000000..878830fe513 --- /dev/null +++ b/homeassistant/components/slide_local/__init__.py @@ -0,0 +1,33 @@ +"""Component for the Slide local API.""" + +from __future__ import annotations + +from goslideapi.goslideapi import GoSlideLocal as SlideLocalApi + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .coordinator import SlideCoordinator + +PLATFORMS = [Platform.COVER] +type SlideConfigEntry = ConfigEntry[SlideLocalApi] + + +async def async_setup_entry(hass: HomeAssistant, entry: SlideConfigEntry) -> bool: + """Set up the slide_local integration.""" + + coordinator = SlideCoordinator(hass, entry) + + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: SlideConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/slide_local/config_flow.py b/homeassistant/components/slide_local/config_flow.py new file mode 
100644 index 00000000000..bc5033e972b --- /dev/null +++ b/homeassistant/components/slide_local/config_flow.py @@ -0,0 +1,183 @@ +"""Config flow for slide_local integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +from goslideapi.goslideapi import ( + AuthenticationFailed, + ClientConnectionError, + ClientTimeoutError, + DigestAuthCalcError, + GoSlideLocal as SlideLocalApi, +) +import voluptuous as vol + +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_MAC, CONF_PASSWORD +from homeassistant.helpers.device_registry import format_mac + +from .const import CONF_INVERT_POSITION, DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class SlideConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for slide_local.""" + + _mac: str = "" + _host: str = "" + _api_version: int | None = None + + VERSION = 1 + MINOR_VERSION = 1 + + async def async_test_connection( + self, user_input: dict[str, str | int] + ) -> dict[str, str]: + """Reusable Auth Helper.""" + slide = SlideLocalApi() + + # first test, if API version 2 is working + await slide.slide_add( + user_input[CONF_HOST], + user_input.get(CONF_PASSWORD, ""), + 2, + ) + + try: + result = await slide.slide_info(user_input[CONF_HOST]) + except (ClientConnectionError, ClientTimeoutError): + return {"base": "cannot_connect"} + except (AuthenticationFailed, DigestAuthCalcError): + return {"base": "invalid_auth"} + except Exception: # noqa: BLE001 + _LOGGER.exception("Exception occurred during connection test") + return {"base": "unknown"} + + if result is not None: + self._api_version = 2 + self._mac = format_mac(result["mac"]) + return {} + + # API version 2 is not working, try API version 1 instead + await slide.slide_del(user_input[CONF_HOST]) + await slide.slide_add( + user_input[CONF_HOST], + 
user_input.get(CONF_PASSWORD, ""), + 1, + ) + + try: + result = await slide.slide_info(user_input[CONF_HOST]) + except (ClientConnectionError, ClientTimeoutError): + return {"base": "cannot_connect"} + except (AuthenticationFailed, DigestAuthCalcError): + return {"base": "invalid_auth"} + except Exception: # noqa: BLE001 + _LOGGER.exception("Exception occurred during connection test") + return {"base": "unknown"} + + if result is None: + # API version 1 isn't working either + return {"base": "unknown"} + + self._api_version = 1 + self._mac = format_mac(result["mac"]) + + return {} + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the user step.""" + errors = {} + if user_input is not None: + if not (errors := await self.async_test_connection(user_input)): + await self.async_set_unique_id(self._mac) + self._abort_if_unique_id_configured() + user_input |= { + CONF_MAC: self._mac, + CONF_API_VERSION: self._api_version, + } + + return self.async_create_entry( + title=user_input[CONF_HOST], + data=user_input, + options={CONF_INVERT_POSITION: False}, + ) + + if user_input is not None and user_input.get(CONF_HOST) is not None: + self._host = user_input[CONF_HOST] + + return self.async_show_form( + step_id="user", + data_schema=self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Required(CONF_HOST): str, + vol.Optional(CONF_PASSWORD): str, + } + ), + {CONF_HOST: self._host}, + ), + errors=errors, + ) + + async def async_step_zeroconf( + self, discovery_info: ZeroconfServiceInfo + ) -> ConfigFlowResult: + """Handle zeroconf discovery.""" + + # id is in the format 'slide_000000000000' + self._mac = format_mac(str(discovery_info.properties.get("id"))[6:]) + + await self.async_set_unique_id(self._mac) + + self._abort_if_unique_id_configured( + {CONF_HOST: discovery_info.host}, reload_on_update=True + ) + + errors = {} + if errors := await self.async_test_connection( + { + CONF_HOST: self._host, + } + 
): + return self.async_abort( + reason="discovery_connection_failed", + description_placeholders={ + "error": errors["base"], + }, + ) + + self._host = discovery_info.host + + return await self.async_step_zeroconf_confirm() + + async def async_step_zeroconf_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm discovery.""" + + if user_input is not None: + user_input |= { + CONF_HOST: self._host, + CONF_API_VERSION: 2, + CONF_MAC: format_mac(self._mac), + } + return self.async_create_entry( + title=user_input[CONF_HOST], + data=user_input, + options={CONF_INVERT_POSITION: False}, + ) + + self._set_confirm_only() + return self.async_show_form( + step_id="zeroconf_confirm", + description_placeholders={ + "host": self._host, + }, + ) diff --git a/homeassistant/components/slide_local/const.py b/homeassistant/components/slide_local/const.py new file mode 100644 index 00000000000..9dc6d4ac925 --- /dev/null +++ b/homeassistant/components/slide_local/const.py @@ -0,0 +1,13 @@ +"""Define constants for the Slide component.""" + +API_LOCAL = "api_local" +ATTR_TOUCHGO = "touchgo" +CONF_INVERT_POSITION = "invert_position" +CONF_VERIFY_SSL = "verify_ssl" +DOMAIN = "slide_local" +SLIDES = "slides" +SLIDES_LOCAL = "slides_local" +DEFAULT_OFFSET = 0.15 +DEFAULT_RETRY = 120 +SERVICE_CALIBRATE = "calibrate" +SERVICE_TOUCHGO = "touchgo" diff --git a/homeassistant/components/slide_local/coordinator.py b/homeassistant/components/slide_local/coordinator.py new file mode 100644 index 00000000000..c7542a4b813 --- /dev/null +++ b/homeassistant/components/slide_local/coordinator.py @@ -0,0 +1,112 @@ +"""DataUpdateCoordinator for slide_local integration.""" + +from __future__ import annotations + +from datetime import timedelta +import logging +from typing import TYPE_CHECKING, Any + +from goslideapi.goslideapi import ( + AuthenticationFailed, + ClientConnectionError, + ClientTimeoutError, + DigestAuthCalcError, + GoSlideLocal as SlideLocalApi, +) + 
+from homeassistant.const import ( + CONF_API_VERSION, + CONF_HOST, + CONF_MAC, + CONF_PASSWORD, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DEFAULT_OFFSET, DOMAIN + +_LOGGER = logging.getLogger(__name__) + +if TYPE_CHECKING: + from . import SlideConfigEntry + + +class SlideCoordinator(DataUpdateCoordinator[dict[str, Any]]): + """Get and update the latest data.""" + + def __init__(self, hass: HomeAssistant, entry: SlideConfigEntry) -> None: + """Initialize the data object.""" + super().__init__( + hass, _LOGGER, name="Slide", update_interval=timedelta(seconds=15) + ) + self.slide = SlideLocalApi() + self.api_version = entry.data[CONF_API_VERSION] + self.mac = entry.data[CONF_MAC] + self.host = entry.data[CONF_HOST] + self.password = entry.data[CONF_PASSWORD] + + async def _async_setup(self) -> None: + """Do initialization logic for Slide coordinator.""" + _LOGGER.debug("Initializing Slide coordinator") + + await self.slide.slide_add( + self.host, + self.password, + self.api_version, + ) + + _LOGGER.debug("Slide coordinator initialized") + + async def _async_update_data(self) -> dict[str, Any]: + """Update the data from the Slide device.""" + _LOGGER.debug("Start data update") + + try: + data = await self.slide.slide_info(self.host) + except ( + ClientConnectionError, + AuthenticationFailed, + ClientTimeoutError, + DigestAuthCalcError, + ) as ex: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + ) from ex + + if data is None: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + ) + + if "pos" in data: + if self.data is None: + oldpos = None + else: + oldpos = self.data.get("pos") + + data["pos"] = max(0, min(1, data["pos"])) + + if oldpos is None or oldpos == data["pos"]: + data["state"] = ( + STATE_CLOSED if 
data["pos"] > (1 - DEFAULT_OFFSET) else STATE_OPEN + ) + elif oldpos < data["pos"]: + data["state"] = ( + STATE_CLOSED + if data["pos"] >= (1 - DEFAULT_OFFSET) + else STATE_CLOSING + ) + else: + data["state"] = ( + STATE_OPEN if data["pos"] <= DEFAULT_OFFSET else STATE_OPENING + ) + + _LOGGER.debug("Data successfully updated: %s", data) + + return data diff --git a/homeassistant/components/slide_local/cover.py b/homeassistant/components/slide_local/cover.py new file mode 100644 index 00000000000..1bf026746c6 --- /dev/null +++ b/homeassistant/components/slide_local/cover.py @@ -0,0 +1,113 @@ +"""Support for Slide covers.""" + +from __future__ import annotations + +import logging +from typing import Any + +from homeassistant.components.cover import ATTR_POSITION, CoverDeviceClass, CoverEntity +from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPENING +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import SlideConfigEntry +from .const import CONF_INVERT_POSITION, DEFAULT_OFFSET +from .coordinator import SlideCoordinator +from .entity import SlideEntity + +_LOGGER = logging.getLogger(__name__) + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SlideConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up cover(s) for Slide platform.""" + + coordinator = entry.runtime_data + + async_add_entities( + [ + SlideCoverLocal( + coordinator, + entry, + ) + ] + ) + + +class SlideCoverLocal(SlideEntity, CoverEntity): + """Representation of a Slide Local API cover.""" + + _attr_assumed_state = True + _attr_device_class = CoverDeviceClass.CURTAIN + + def __init__( + self, + coordinator: SlideCoordinator, + entry: SlideConfigEntry, + ) -> None: + """Initialize the cover.""" + super().__init__(coordinator) + + self._attr_name = None + self._invert = entry.options[CONF_INVERT_POSITION] + self._attr_unique_id = coordinator.data["mac"] + + @property + def is_opening(self) -> bool: + """Return if the cover is opening or not.""" + return self.coordinator.data["state"] == STATE_OPENING + + @property + def is_closing(self) -> bool: + """Return if the cover is closing or not.""" + return self.coordinator.data["state"] == STATE_CLOSING + + @property + def is_closed(self) -> bool: + """Return None if status is unknown, True if closed, else False.""" + return self.coordinator.data["state"] == STATE_CLOSED + + @property + def current_cover_position(self) -> int | None: + """Return the current position of cover shutter.""" + pos = self.coordinator.data["pos"] + if pos is not None: + if (1 - pos) <= DEFAULT_OFFSET or pos <= DEFAULT_OFFSET: + pos = round(pos) + if not self._invert: + pos = 1 - pos + pos = int(pos * 100) + return pos + + async def async_open_cover(self, **kwargs: Any) -> None: + """Open the cover.""" + self.coordinator.data["state"] = STATE_OPENING + await self.coordinator.slide.slide_open(self.coordinator.host) 
+ + async def async_close_cover(self, **kwargs: Any) -> None: + """Close the cover.""" + self.coordinator.data["state"] = STATE_CLOSING + await self.coordinator.slide.slide_close(self.coordinator.host) + + async def async_stop_cover(self, **kwargs: Any) -> None: + """Stop the cover.""" + await self.coordinator.slide.slide_stop(self.coordinator.host) + + async def async_set_cover_position(self, **kwargs: Any) -> None: + """Move the cover to a specific position.""" + position = kwargs[ATTR_POSITION] / 100 + if not self._invert: + position = 1 - position + + if self.coordinator.data["pos"] is not None: + if position > self.coordinator.data["pos"]: + self.coordinator.data["state"] = STATE_CLOSING + else: + self.coordinator.data["state"] = STATE_OPENING + + await self.coordinator.slide.slide_set_position(self.coordinator.host, position) diff --git a/homeassistant/components/slide_local/entity.py b/homeassistant/components/slide_local/entity.py new file mode 100644 index 00000000000..c1dbc101e6f --- /dev/null +++ b/homeassistant/components/slide_local/entity.py @@ -0,0 +1,29 @@ +"""Entities for slide_local integration.""" + +from homeassistant.const import CONF_MAC +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .coordinator import SlideCoordinator + + +class SlideEntity(CoordinatorEntity[SlideCoordinator]): + """Base class of a Slide local API cover.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: SlideCoordinator, + ) -> None: + """Initialize the Slide device.""" + super().__init__(coordinator) + + self._attr_device_info = DeviceInfo( + manufacturer="Innovation in Motion", + connections={(CONF_MAC, coordinator.data["mac"])}, + name=coordinator.data["device_name"], + sw_version=coordinator.api_version, + serial_number=coordinator.data["mac"], + configuration_url=f"http://{coordinator.host}", + ) diff --git 
a/homeassistant/components/slide_local/manifest.json b/homeassistant/components/slide_local/manifest.json new file mode 100644 index 00000000000..42c74b2c308 --- /dev/null +++ b/homeassistant/components/slide_local/manifest.json @@ -0,0 +1,17 @@ +{ + "domain": "slide_local", + "name": "Slide Local", + "codeowners": ["@dontinelli"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/slide_local", + "integration_type": "device", + "iot_class": "local_polling", + "quality_scale": "bronze", + "requirements": ["goslide-api==0.7.0"], + "zeroconf": [ + { + "type": "_http._tcp.local.", + "name": "slide*" + } + ] +} diff --git a/homeassistant/components/slide_local/quality_scale.yaml b/homeassistant/components/slide_local/quality_scale.yaml new file mode 100644 index 00000000000..048a428f236 --- /dev/null +++ b/homeassistant/components/slide_local/quality_scale.yaml @@ -0,0 +1,66 @@ +rules: + # Bronze + config-flow: done + test-before-configure: done + unique-config-entry: done + config-flow-test-coverage: done + runtime-data: done + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: + status: exempt + comment: No explicit event subscriptions. + dependency-transparency: done + action-setup: done + common-modules: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + docs-actions: done + brands: done + + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: + status: exempt + comment: No custom action. 
+ reauthentication-flow: todo + parallel-updates: done + test-coverage: todo + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: todo + + # Gold + entity-translations: todo + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: done + discovery: done + stale-devices: todo + diagnostics: todo + exception-translations: done + icon-translations: todo + reconfiguration-flow: todo + dynamic-devices: todo + discovery-update-info: todo + repair-issues: todo + docs-use-cases: done + docs-supported-devices: done + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: + status: exempt + comment: | + This integration doesn't have known issues that could be resolved by the user. + docs-examples: done + # Platinum + async-dependency: done + inject-websession: todo + strict-typing: todo diff --git a/homeassistant/components/slide_local/strings.json b/homeassistant/components/slide_local/strings.json new file mode 100644 index 00000000000..38090c7e62d --- /dev/null +++ b/homeassistant/components/slide_local/strings.json @@ -0,0 +1,35 @@ +{ + "config": { + "step": { + "user": { + "description": "Provide information to connect to the Slide device", + "data": { + "host": "[%key:common::config_flow::data::host%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "The hostname or IP address of your local Slide", + "password": "The device code of your Slide (inside of the Slide or in the box, length is 8 characters). If your Slide runs firmware version 2 this is optional, as it is not used by the local API." + } + }, + "zeroconf_confirm": { + "title": "Confirm setup for Slide", + "description": "Do you want to setup {host}?" 
+ } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "discovery_connection_failed": "The setup of the discovered device failed with the following error: {error}. Please try to set it up manually." + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + } + }, + "exceptions": { + "update_error": { + "message": "Error while updating data from the API." + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index a3858fd176f..b074ff714f6 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -545,6 +545,7 @@ FLOWS = { "skybell", "slack", "sleepiq", + "slide_local", "slimproto", "sma", "smappee", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 5128578b606..fcd974534af 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -5660,9 +5660,20 @@ }, "slide": { "name": "Slide", - "integration_type": "hub", - "config_flow": false, - "iot_class": "cloud_polling" + "integrations": { + "slide": { + "integration_type": "hub", + "config_flow": false, + "iot_class": "cloud_polling", + "name": "Slide" + }, + "slide_local": { + "integration_type": "device", + "config_flow": true, + "iot_class": "local_polling", + "name": "Slide Local" + } + } }, "slimproto": { "name": "SlimProto (Squeezebox players)", diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index 9bfff93cc2f..b04e6ad6f52 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -562,6 +562,10 @@ ZEROCONF = { "domain": "shelly", "name": "shelly*", }, + { + "domain": "slide_local", + "name": "slide*", + }, { "domain": 
"synology_dsm", "properties": { diff --git a/requirements_all.txt b/requirements_all.txt index c361ffec5a8..4ee02e13695 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1028,6 +1028,7 @@ google-photos-library-api==0.12.1 googlemaps==2.5.1 # homeassistant.components.slide +# homeassistant.components.slide_local goslide-api==0.7.0 # homeassistant.components.tailwind diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1c918cb2f1c..f7faaa3ae0d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -877,6 +877,10 @@ google-photos-library-api==0.12.1 # homeassistant.components.google_travel_time googlemaps==2.5.1 +# homeassistant.components.slide +# homeassistant.components.slide_local +goslide-api==0.7.0 + # homeassistant.components.tailwind gotailwind==0.3.0 diff --git a/tests/components/slide_local/__init__.py b/tests/components/slide_local/__init__.py new file mode 100644 index 00000000000..cd7bd6cb6d1 --- /dev/null +++ b/tests/components/slide_local/__init__.py @@ -0,0 +1,21 @@ +"""Tests for the slide_local integration.""" + +from unittest.mock import patch + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_platform( + hass: HomeAssistant, config_entry: MockConfigEntry, platforms: list[Platform] +) -> MockConfigEntry: + """Set up the slide local integration.""" + config_entry.add_to_hass(hass) + + with patch("homeassistant.components.slide_local.PLATFORMS", platforms): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/slide_local/conftest.py b/tests/components/slide_local/conftest.py new file mode 100644 index 00000000000..0d70d1989e7 --- /dev/null +++ b/tests/components/slide_local/conftest.py @@ -0,0 +1,63 @@ +"""Test fixtures for Slide local.""" + +from collections.abc import Generator +from 
unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.slide_local.const import CONF_INVERT_POSITION, DOMAIN +from homeassistant.const import CONF_API_VERSION, CONF_HOST + +from .const import HOST, SLIDE_INFO_DATA + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="slide", + data={ + CONF_HOST: HOST, + CONF_API_VERSION: 2, + }, + options={ + CONF_INVERT_POSITION: False, + }, + minor_version=1, + unique_id="12:34:56:78:90:ab", + entry_id="ce5f5431554d101905d31797e1232da8", + ) + + +@pytest.fixture +def mock_slide_api(): + """Build a fixture for the SlideLocalApi that connects successfully and returns one device.""" + + mock_slide_local_api = AsyncMock() + mock_slide_local_api.slide_info.return_value = SLIDE_INFO_DATA + + with ( + patch( + "homeassistant.components.slide_local.SlideLocalApi", + autospec=True, + return_value=mock_slide_local_api, + ), + patch( + "homeassistant.components.slide_local.config_flow.SlideLocalApi", + autospec=True, + return_value=mock_slide_local_api, + ), + ): + yield mock_slide_local_api + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.slide_local.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry diff --git a/tests/components/slide_local/const.py b/tests/components/slide_local/const.py new file mode 100644 index 00000000000..edf45753407 --- /dev/null +++ b/tests/components/slide_local/const.py @@ -0,0 +1,8 @@ +"""Common const used across tests for slide_local.""" + +from homeassistant.components.slide_local.const import DOMAIN + +from tests.common import load_json_object_fixture + +HOST = "127.0.0.2" +SLIDE_INFO_DATA = load_json_object_fixture("slide_1.json", DOMAIN) diff --git a/tests/components/slide_local/fixtures/slide_1.json 
b/tests/components/slide_local/fixtures/slide_1.json new file mode 100644 index 00000000000..e8c3c85a324 --- /dev/null +++ b/tests/components/slide_local/fixtures/slide_1.json @@ -0,0 +1,11 @@ +{ + "slide_id": "slide_300000000000", + "mac": "300000000000", + "board_rev": 1, + "device_name": "slide bedroom", + "zone_name": "bedroom", + "curtain_type": 0, + "calib_time": 10239, + "pos": 0.0, + "touch_go": true +} diff --git a/tests/components/slide_local/test_config_flow.py b/tests/components/slide_local/test_config_flow.py new file mode 100644 index 00000000000..35aa99a90d7 --- /dev/null +++ b/tests/components/slide_local/test_config_flow.py @@ -0,0 +1,373 @@ +"""Test the slide_local config flow.""" + +from ipaddress import ip_address +from unittest.mock import AsyncMock + +from goslideapi.goslideapi import ( + AuthenticationFailed, + ClientConnectionError, + ClientTimeoutError, + DigestAuthCalcError, +) +import pytest + +from homeassistant.components.slide_local.const import CONF_INVERT_POSITION, DOMAIN +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .const import HOST, SLIDE_INFO_DATA + +from tests.common import MockConfigEntry + +MOCK_ZEROCONF_DATA = ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.2"), + ip_addresses=[ip_address("127.0.0.2")], + hostname="Slide-1234567890AB.local.", + name="Slide-1234567890AB._http._tcp.local.", + port=80, + properties={ + "id": "slide-1234567890AB", + "arch": "esp32", + "app": "slide", + "fw_version": "2.0.0-1683059251", + "fw_id": "20230502-202745", + }, + type="mock_type", +) + + +async def test_user( + hass: HomeAssistant, mock_slide_api: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test we get the form.""" + + result = await 
hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == HOST + assert result2["data"][CONF_HOST] == HOST + assert result2["data"][CONF_PASSWORD] == "pwd" + assert result2["data"][CONF_API_VERSION] == 2 + assert result2["result"].unique_id == "30:00:00:00:00:00" + assert not result2["options"][CONF_INVERT_POSITION] + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_user_api_1( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + mock_slide_api.slide_info.side_effect = [None, SLIDE_INFO_DATA] + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == HOST + assert result2["data"][CONF_HOST] == HOST + assert result2["data"][CONF_PASSWORD] == "pwd" + assert result2["data"][CONF_API_VERSION] == 1 + assert result2["result"].unique_id == "30:00:00:00:00:00" + assert not result2["options"][CONF_INVERT_POSITION] + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_user_api_error( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + 
mock_slide_api.slide_info.side_effect = [None, None] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"]["base"] == "unknown" + + mock_slide_api.slide_info.side_effect = [None, SLIDE_INFO_DATA] + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == HOST + assert result2["data"][CONF_HOST] == HOST + assert result2["data"][CONF_PASSWORD] == "pwd" + assert result2["data"][CONF_API_VERSION] == 1 + assert result2["result"].unique_id == "30:00:00:00:00:00" + assert not result2["options"][CONF_INVERT_POSITION] + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (ClientConnectionError, "cannot_connect"), + (ClientTimeoutError, "cannot_connect"), + (AuthenticationFailed, "invalid_auth"), + (DigestAuthCalcError, "invalid_auth"), + (Exception, "unknown"), + ], +) +async def test_api_1_exceptions( + hass: HomeAssistant, + exception: Exception, + error: str, + mock_slide_api: AsyncMock, +) -> None: + """Test we can handle Form exceptions for api 1.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + mock_slide_api.slide_info.side_effect = [None, exception] + + # tests with connection error + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"]["base"] == error + + # tests with all provided + mock_slide_api.slide_info.side_effect = [None, 
SLIDE_INFO_DATA] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (ClientConnectionError, "cannot_connect"), + (ClientTimeoutError, "cannot_connect"), + (AuthenticationFailed, "invalid_auth"), + (DigestAuthCalcError, "invalid_auth"), + (Exception, "unknown"), + ], +) +async def test_api_2_exceptions( + hass: HomeAssistant, + exception: Exception, + error: str, + mock_slide_api: AsyncMock, +) -> None: + """Test we can handle Form exceptions for api 2.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + mock_slide_api.slide_info.side_effect = exception + + # tests with connection error + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"]["base"] == error + + # tests with all provided + mock_slide_api.slide_info.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_abort_if_already_setup( + hass: HomeAssistant, + mock_slide_api: AsyncMock, +) -> None: + """Test we abort if the device is already setup.""" + + MockConfigEntry(domain=DOMAIN, unique_id="30:00:00:00:00:00").add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PASSWORD: "pwd", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_zeroconf( + hass: HomeAssistant, mock_slide_api: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test starting a flow from discovery.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_ZEROCONF}, data=MOCK_ZEROCONF_DATA + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "zeroconf_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "127.0.0.2" + assert result["data"][CONF_HOST] == "127.0.0.2" + assert not result["options"][CONF_INVERT_POSITION] + assert result["result"].unique_id == "12:34:56:78:90:ab" + + +async def test_zeroconf_duplicate_entry( + hass: HomeAssistant, mock_slide_api: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test starting a flow from discovery.""" + + MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: HOST}, unique_id="12:34:56:78:90:ab" + ).add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_ZEROCONF}, data=MOCK_ZEROCONF_DATA + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + entries = hass.config_entries.async_entries(DOMAIN) + assert entries[0].data[CONF_HOST] == HOST + + +async def test_zeroconf_update_duplicate_entry( + hass: HomeAssistant, mock_slide_api: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test updating an existing entry from discovery.""" + + MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.3"}, unique_id="12:34:56:78:90:ab" + ).add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": 
SOURCE_ZEROCONF}, data=MOCK_ZEROCONF_DATA + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + entries = hass.config_entries.async_entries(DOMAIN) + assert entries[0].data[CONF_HOST] == HOST + + +@pytest.mark.parametrize( + ("exception"), + [ + (ClientConnectionError), + (ClientTimeoutError), + (AuthenticationFailed), + (DigestAuthCalcError), + (Exception), + ], +) +async def test_zeroconf_connection_error( + hass: HomeAssistant, + exception: Exception, + mock_slide_api: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test starting a flow from discovery.""" + + MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "slide_host"}, unique_id="12:34:56:78:90:cd" + ).add_to_hass(hass) + + mock_slide_api.slide_info.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_ZEROCONF}, data=MOCK_ZEROCONF_DATA + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "discovery_connection_failed" From 55fa717f100e96626e077a61c874512a98b4dc44 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:18:27 +0100 Subject: [PATCH 146/677] Migrate flux_led light tests to use Kelvin (#133009) --- tests/components/flux_led/test_light.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/tests/components/flux_led/test_light.py b/tests/components/flux_led/test_light.py index c12776eb552..a881bc2ea27 100644 --- a/tests/components/flux_led/test_light.py +++ b/tests/components/flux_led/test_light.py @@ -41,7 +41,7 @@ from homeassistant.components.flux_led.light import ( from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, ATTR_HS_COLOR, @@ -777,12 +777,12 @@ async def test_rgb_cct_light(hass: HomeAssistant) -> None: assert attributes[ATTR_BRIGHTNESS] == 128 
assert attributes[ATTR_COLOR_MODE] == "color_temp" assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "rgb"] - assert attributes[ATTR_COLOR_TEMP] == 200 + assert attributes[ATTR_COLOR_TEMP_KELVIN] == 5000 await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 370}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 2702}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(2702, 128) @@ -1003,7 +1003,7 @@ async def test_rgbw_light_warm_white(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 154}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6493}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(6493, 255) @@ -1012,7 +1012,7 @@ async def test_rgbw_light_warm_white(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 154, ATTR_BRIGHTNESS: 255}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6493, ATTR_BRIGHTNESS: 255}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(6493, 255) @@ -1021,7 +1021,7 @@ async def test_rgbw_light_warm_white(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 290}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 3448}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(3448, 255) @@ -1241,7 +1241,7 @@ async def test_rgbcw_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 154}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6493}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(6493, 255) @@ -1250,7 +1250,7 @@ async def test_rgbcw_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, 
ATTR_COLOR_TEMP: 154, ATTR_BRIGHTNESS: 255}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 6493, ATTR_BRIGHTNESS: 255}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(6493, 255) @@ -1259,7 +1259,7 @@ async def test_rgbcw_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 290}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 3448}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(3448, 255) @@ -1316,7 +1316,7 @@ async def test_rgbcw_light(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 170}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 5882}, blocking=True, ) bulb.async_set_white_temp.assert_called_with(5882, MIN_CCT_BRIGHTNESS) From 56db5368834da5c05da2699a2bae68d27fc0fac8 Mon Sep 17 00:00:00 2001 From: Cyrill Raccaud Date: Thu, 12 Dec 2024 20:23:14 +0100 Subject: [PATCH 147/677] Add Cookidoo integration (#129800) --- .strict-typing | 1 + CODEOWNERS | 2 + homeassistant/components/cookidoo/__init__.py | 49 +++ .../components/cookidoo/config_flow.py | 167 ++++++++++ homeassistant/components/cookidoo/const.py | 3 + .../components/cookidoo/coordinator.py | 101 ++++++ homeassistant/components/cookidoo/entity.py | 30 ++ homeassistant/components/cookidoo/icons.json | 12 + .../components/cookidoo/manifest.json | 11 + .../components/cookidoo/quality_scale.yaml | 90 ++++++ .../components/cookidoo/strings.json | 68 ++++ homeassistant/components/cookidoo/todo.py | 185 +++++++++++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + mypy.ini | 10 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/cookidoo/__init__.py | 15 + tests/components/cookidoo/conftest.py | 76 +++++ .../cookidoo/fixtures/additional_items.json | 9 + .../cookidoo/fixtures/ingredient_items.json | 10 + 
.../cookidoo/snapshots/test_todo.ambr | 95 ++++++ tests/components/cookidoo/test_config_flow.py | 182 +++++++++++ tests/components/cookidoo/test_init.py | 102 ++++++ tests/components/cookidoo/test_todo.py | 292 ++++++++++++++++++ 25 files changed, 1523 insertions(+) create mode 100644 homeassistant/components/cookidoo/__init__.py create mode 100644 homeassistant/components/cookidoo/config_flow.py create mode 100644 homeassistant/components/cookidoo/const.py create mode 100644 homeassistant/components/cookidoo/coordinator.py create mode 100644 homeassistant/components/cookidoo/entity.py create mode 100644 homeassistant/components/cookidoo/icons.json create mode 100644 homeassistant/components/cookidoo/manifest.json create mode 100644 homeassistant/components/cookidoo/quality_scale.yaml create mode 100644 homeassistant/components/cookidoo/strings.json create mode 100644 homeassistant/components/cookidoo/todo.py create mode 100644 tests/components/cookidoo/__init__.py create mode 100644 tests/components/cookidoo/conftest.py create mode 100644 tests/components/cookidoo/fixtures/additional_items.json create mode 100644 tests/components/cookidoo/fixtures/ingredient_items.json create mode 100644 tests/components/cookidoo/snapshots/test_todo.ambr create mode 100644 tests/components/cookidoo/test_config_flow.py create mode 100644 tests/components/cookidoo/test_init.py create mode 100644 tests/components/cookidoo/test_todo.py diff --git a/.strict-typing b/.strict-typing index 130ae6e9393..ade5d6afb7b 100644 --- a/.strict-typing +++ b/.strict-typing @@ -137,6 +137,7 @@ homeassistant.components.co2signal.* homeassistant.components.command_line.* homeassistant.components.config.* homeassistant.components.configurator.* +homeassistant.components.cookidoo.* homeassistant.components.counter.* homeassistant.components.cover.* homeassistant.components.cpuspeed.* diff --git a/CODEOWNERS b/CODEOWNERS index 6c11f57da83..afd150ffb0c 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -284,6 
+284,8 @@ build.json @home-assistant/supervisor /tests/components/control4/ @lawtancool /homeassistant/components/conversation/ @home-assistant/core @synesthesiam /tests/components/conversation/ @home-assistant/core @synesthesiam +/homeassistant/components/cookidoo/ @miaucl +/tests/components/cookidoo/ @miaucl /homeassistant/components/coolmaster/ @OnFreund /tests/components/coolmaster/ @OnFreund /homeassistant/components/counter/ @fabaff diff --git a/homeassistant/components/cookidoo/__init__.py b/homeassistant/components/cookidoo/__init__.py new file mode 100644 index 00000000000..bb78f2a569d --- /dev/null +++ b/homeassistant/components/cookidoo/__init__.py @@ -0,0 +1,49 @@ +"""The Cookidoo integration.""" + +from __future__ import annotations + +from cookidoo_api import Cookidoo, CookidooConfig, CookidooLocalizationConfig + +from homeassistant.const import ( + CONF_COUNTRY, + CONF_EMAIL, + CONF_LANGUAGE, + CONF_PASSWORD, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .coordinator import CookidooConfigEntry, CookidooDataUpdateCoordinator + +PLATFORMS: list[Platform] = [Platform.TODO] + + +async def async_setup_entry(hass: HomeAssistant, entry: CookidooConfigEntry) -> bool: + """Set up Cookidoo from a config entry.""" + + cookidoo = Cookidoo( + async_get_clientsession(hass), + CookidooConfig( + email=entry.data[CONF_EMAIL], + password=entry.data[CONF_PASSWORD], + localization=CookidooLocalizationConfig( + country_code=entry.data[CONF_COUNTRY].lower(), + language=entry.data[CONF_LANGUAGE], + ), + ), + ) + + coordinator = CookidooDataUpdateCoordinator(hass, cookidoo, entry) + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: CookidooConfigEntry) -> bool: + """Unload a config 
entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/cookidoo/config_flow.py b/homeassistant/components/cookidoo/config_flow.py new file mode 100644 index 00000000000..ce7ad9fde87 --- /dev/null +++ b/homeassistant/components/cookidoo/config_flow.py @@ -0,0 +1,167 @@ +"""Config flow for Cookidoo integration.""" + +from __future__ import annotations + +from collections.abc import Mapping +import logging +from typing import Any + +from cookidoo_api import ( + Cookidoo, + CookidooAuthException, + CookidooConfig, + CookidooLocalizationConfig, + CookidooRequestException, + get_country_options, + get_localization_options, +) +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_COUNTRY, CONF_EMAIL, CONF_LANGUAGE, CONF_PASSWORD +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.selector import ( + CountrySelector, + CountrySelectorConfig, + LanguageSelector, + LanguageSelectorConfig, + TextSelector, + TextSelectorConfig, + TextSelectorType, +) + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +AUTH_DATA_SCHEMA = { + vol.Required(CONF_EMAIL): TextSelector( + TextSelectorConfig( + type=TextSelectorType.EMAIL, + autocomplete="email", + ), + ), + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig( + type=TextSelectorType.PASSWORD, + autocomplete="current-password", + ), + ), +} + + +class CookidooConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Cookidoo.""" + + COUNTRY_DATA_SCHEMA: dict + LANGUAGE_DATA_SCHEMA: dict + + user_input: dict[str, Any] + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the user step.""" + errors: dict[str, str] = {} + + if user_input is not None and not ( + errors := await self.validate_input(user_input) + ): + 
self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]}) + self.user_input = user_input + return await self.async_step_language() + await self.generate_country_schema() + return self.async_show_form( + step_id="user", + data_schema=self.add_suggested_values_to_schema( + data_schema=vol.Schema( + {**AUTH_DATA_SCHEMA, **self.COUNTRY_DATA_SCHEMA} + ), + suggested_values=user_input, + ), + errors=errors, + ) + + async def async_step_language( + self, + language_input: dict[str, Any] | None = None, + ) -> ConfigFlowResult: + """Async language step to set up the connection.""" + errors: dict[str, str] = {} + if language_input is not None and not ( + errors := await self.validate_input(self.user_input, language_input) + ): + return self.async_create_entry( + title="Cookidoo", data={**self.user_input, **language_input} + ) + + await self.generate_language_schema() + return self.async_show_form( + step_id="language", + data_schema=vol.Schema(self.LANGUAGE_DATA_SCHEMA), + errors=errors, + ) + + async def generate_country_schema(self) -> None: + """Generate country schema.""" + self.COUNTRY_DATA_SCHEMA = { + vol.Required(CONF_COUNTRY): CountrySelector( + CountrySelectorConfig( + countries=[ + country.upper() for country in await get_country_options() + ], + ) + ) + } + + async def generate_language_schema(self) -> None: + """Generate language schema.""" + self.LANGUAGE_DATA_SCHEMA = { + vol.Required(CONF_LANGUAGE): LanguageSelector( + LanguageSelectorConfig( + languages=[ + option.language + for option in await get_localization_options( + country=self.user_input[CONF_COUNTRY].lower() + ) + ], + native_name=True, + ), + ), + } + + async def validate_input( + self, + user_input: Mapping[str, Any], + language_input: Mapping[str, Any] | None = None, + ) -> dict[str, str]: + """Input Helper.""" + + errors: dict[str, str] = {} + + session = async_get_clientsession(self.hass) + cookidoo = Cookidoo( + session, + CookidooConfig( + email=user_input[CONF_EMAIL], + 
password=user_input[CONF_PASSWORD], + localization=CookidooLocalizationConfig( + country_code=user_input[CONF_COUNTRY].lower(), + language=language_input[CONF_LANGUAGE] + if language_input + else "de-ch", + ), + ), + ) + try: + await cookidoo.login() + if language_input: + await cookidoo.get_additional_items() + except CookidooRequestException: + errors["base"] = "cannot_connect" + except CookidooAuthException: + errors["base"] = "invalid_auth" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + return errors diff --git a/homeassistant/components/cookidoo/const.py b/homeassistant/components/cookidoo/const.py new file mode 100644 index 00000000000..37c584404a0 --- /dev/null +++ b/homeassistant/components/cookidoo/const.py @@ -0,0 +1,3 @@ +"""Constants for the Cookidoo integration.""" + +DOMAIN = "cookidoo" diff --git a/homeassistant/components/cookidoo/coordinator.py b/homeassistant/components/cookidoo/coordinator.py new file mode 100644 index 00000000000..23a133ea16f --- /dev/null +++ b/homeassistant/components/cookidoo/coordinator.py @@ -0,0 +1,101 @@ +"""DataUpdateCoordinator for the Cookidoo integration.""" + +from __future__ import annotations + +from dataclasses import dataclass +from datetime import timedelta +import logging + +from cookidoo_api import ( + Cookidoo, + CookidooAdditionalItem, + CookidooAuthException, + CookidooException, + CookidooIngredientItem, + CookidooRequestException, +) + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_EMAIL +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +type CookidooConfigEntry = ConfigEntry[CookidooDataUpdateCoordinator] + + +@dataclass +class CookidooData: + """Cookidoo data type.""" + + ingredient_items: 
list[CookidooIngredientItem] + additional_items: list[CookidooAdditionalItem] + + +class CookidooDataUpdateCoordinator(DataUpdateCoordinator[CookidooData]): + """A Cookidoo Data Update Coordinator.""" + + config_entry: CookidooConfigEntry + + def __init__( + self, hass: HomeAssistant, cookidoo: Cookidoo, entry: CookidooConfigEntry + ) -> None: + """Initialize the Cookidoo data coordinator.""" + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=timedelta(seconds=90), + config_entry=entry, + ) + self.cookidoo = cookidoo + + async def _async_setup(self) -> None: + try: + await self.cookidoo.login() + except CookidooRequestException as e: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="setup_request_exception", + ) from e + except CookidooAuthException as e: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="setup_authentication_exception", + translation_placeholders={ + CONF_EMAIL: self.config_entry.data[CONF_EMAIL] + }, + ) from e + + async def _async_update_data(self) -> CookidooData: + try: + ingredient_items = await self.cookidoo.get_ingredient_items() + additional_items = await self.cookidoo.get_additional_items() + except CookidooAuthException: + try: + await self.cookidoo.refresh_token() + except CookidooAuthException as exc: + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="setup_authentication_exception", + translation_placeholders={ + CONF_EMAIL: self.config_entry.data[CONF_EMAIL] + }, + ) from exc + _LOGGER.debug( + "Authentication failed but re-authentication was successful, trying again later" + ) + return self.data + except CookidooException as e: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_exception", + ) from e + + return CookidooData( + ingredient_items=ingredient_items, additional_items=additional_items + ) diff --git a/homeassistant/components/cookidoo/entity.py b/homeassistant/components/cookidoo/entity.py new file mode 100644 
index 00000000000..5c8f3ec8441 --- /dev/null +++ b/homeassistant/components/cookidoo/entity.py @@ -0,0 +1,30 @@ +"""Base entity for the Cookidoo integration.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import CookidooDataUpdateCoordinator + + +class CookidooBaseEntity(CoordinatorEntity[CookidooDataUpdateCoordinator]): + """Cookidoo base entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: CookidooDataUpdateCoordinator, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + + self.device_info = DeviceInfo( + entry_type=DeviceEntryType.SERVICE, + name="Cookidoo", + identifiers={(DOMAIN, coordinator.config_entry.entry_id)}, + manufacturer="Vorwerk International & Co. KmG", + model="Cookidoo - Thermomix® recipe portal", + ) diff --git a/homeassistant/components/cookidoo/icons.json b/homeassistant/components/cookidoo/icons.json new file mode 100644 index 00000000000..36c0724331a --- /dev/null +++ b/homeassistant/components/cookidoo/icons.json @@ -0,0 +1,12 @@ +{ + "entity": { + "todo": { + "ingredient_list": { + "default": "mdi:cart-plus" + }, + "additional_item_list": { + "default": "mdi:cart-plus" + } + } + } +} diff --git a/homeassistant/components/cookidoo/manifest.json b/homeassistant/components/cookidoo/manifest.json new file mode 100644 index 00000000000..7e9e86f9d9d --- /dev/null +++ b/homeassistant/components/cookidoo/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "cookidoo", + "name": "Cookidoo", + "codeowners": ["@miaucl"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/cookidoo", + "integration_type": "service", + "iot_class": "cloud_polling", + "quality_scale": "bronze", + "requirements": ["cookidoo-api==0.10.0"] +} diff --git 
a/homeassistant/components/cookidoo/quality_scale.yaml b/homeassistant/components/cookidoo/quality_scale.yaml new file mode 100644 index 00000000000..7b2bbb7592b --- /dev/null +++ b/homeassistant/components/cookidoo/quality_scale.yaml @@ -0,0 +1,90 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: No service actions implemented + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: No service actions implemented + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: + status: exempt + comment: No special external action required + entity-event-setup: + status: exempt + comment: No callbacks are implemented + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + config-entry-unloading: done + log-when-unavailable: + status: done + comment: Offloaded to coordinator + entity-unavailable: + status: done + comment: Offloaded to coordinator + action-exceptions: + status: done + comment: Only providing todo actions + reauthentication-flow: todo + parallel-updates: done + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: + status: exempt + comment: No options flow + + # Gold + entity-translations: done + entity-device-class: + status: exempt + comment: currently no platform with device classes + devices: done + entity-category: done + entity-disabled-by-default: + status: exempt + comment: No disabled entities implemented + discovery: + status: exempt + comment: Nothing to discover + stale-devices: + status: exempt + comment: No stale entities possible + diagnostics: todo + exception-translations: done + icon-translations: done + reconfiguration-flow: todo + dynamic-devices: + status: exempt + 
comment: No dynamic entities available + discovery-update-info: + status: exempt + comment: No discoverable entities implemented + repair-issues: + status: exempt + comment: No issues/repairs + docs-use-cases: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: todo + docs-examples: todo + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/cookidoo/strings.json b/homeassistant/components/cookidoo/strings.json new file mode 100644 index 00000000000..2c518f472d5 --- /dev/null +++ b/homeassistant/components/cookidoo/strings.json @@ -0,0 +1,68 @@ +{ + "config": { + "step": { + "user": { + "title": "Login to Cookidoo", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]", + "country": "Country" + }, + "data_description": { + "email": "Email used to access your Cookidoo account.", + "password": "Password used to access your Cookidoo account.", + "country": "Pick your country for the Cookidoo content." + } + }, + "language": { + "title": "Login to Cookidoo", + "data": { + "language": "[%key:common::config_flow::data::language%]" + }, + "data_description": { + "language": "Pick your language for the Cookidoo content."
+ } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + } + }, + "entity": { + "todo": { + "ingredient_list": { + "name": "Shopping list" + }, + "additional_item_list": { + "name": "Additional purchases" + } + } + }, + "exceptions": { + "todo_save_item_failed": { + "message": "Failed to save {name} to Cookidoo shopping list" + }, + "todo_update_item_failed": { + "message": "Failed to update {name} in Cookidoo shopping list" + }, + "todo_delete_item_failed": { + "message": "Failed to delete {count} item(s) from Cookidoo shopping list" + }, + "setup_request_exception": { + "message": "Failed to connect to server, try again later" + }, + "setup_authentication_exception": { + "message": "Authentication failed for {email}, check your email and password" + }, + "update_exception": { + "message": "Unable to connect and retrieve data from Cookidoo" + } + } +} diff --git a/homeassistant/components/cookidoo/todo.py b/homeassistant/components/cookidoo/todo.py new file mode 100644 index 00000000000..4a70dadc65a --- /dev/null +++ b/homeassistant/components/cookidoo/todo.py @@ -0,0 +1,185 @@ +"""Todo platform for the Cookidoo integration.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from cookidoo_api import ( + CookidooAdditionalItem, + CookidooException, + CookidooIngredientItem, +) + +from homeassistant.components.todo import ( + TodoItem, + TodoItemStatus, + TodoListEntity, + TodoListEntityFeature, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import
HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN +from .coordinator import CookidooConfigEntry, CookidooDataUpdateCoordinator +from .entity import CookidooBaseEntity + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: CookidooConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the todo list from a config entry created in the integrations UI.""" + coordinator = config_entry.runtime_data + + async_add_entities( + [ + CookidooIngredientsTodoListEntity(coordinator), + CookidooAdditionalItemTodoListEntity(coordinator), + ] + ) + + +class CookidooIngredientsTodoListEntity(CookidooBaseEntity, TodoListEntity): + """A To-do List representation of the ingredients in the Cookidoo Shopping List.""" + + _attr_translation_key = "ingredient_list" + _attr_supported_features = TodoListEntityFeature.UPDATE_TODO_ITEM + + def __init__(self, coordinator: CookidooDataUpdateCoordinator) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._attr_unique_id = f"{coordinator.config_entry.entry_id}_ingredients" + + @property + def todo_items(self) -> list[TodoItem]: + """Return the todo ingredients.""" + return [ + TodoItem( + uid=item.id, + summary=item.name, + description=item.description or "", + status=( + TodoItemStatus.COMPLETED + if item.is_owned + else TodoItemStatus.NEEDS_ACTION + ), + ) + for item in self.coordinator.data.ingredient_items + ] + + async def async_update_todo_item(self, item: TodoItem) -> None: + """Update an ingredient to the To-do list. + + Cookidoo ingredients can be changed in state, but not in summary or description. This is currently not possible to distinguish in home assistant and just fails silently. 
+ """ + try: + if TYPE_CHECKING: + assert item.uid + await self.coordinator.cookidoo.edit_ingredient_items_ownership( + [ + CookidooIngredientItem( + id=item.uid, + name="", + description="", + is_owned=item.status == TodoItemStatus.COMPLETED, + ) + ] + ) + except CookidooException as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="todo_update_item_failed", + translation_placeholders={"name": item.summary or ""}, + ) from e + + await self.coordinator.async_refresh() + + +class CookidooAdditionalItemTodoListEntity(CookidooBaseEntity, TodoListEntity): + """A To-do List representation of the additional items in the Cookidoo Shopping List.""" + + _attr_translation_key = "additional_item_list" + _attr_supported_features = ( + TodoListEntityFeature.CREATE_TODO_ITEM + | TodoListEntityFeature.UPDATE_TODO_ITEM + | TodoListEntityFeature.DELETE_TODO_ITEM + ) + + def __init__(self, coordinator: CookidooDataUpdateCoordinator) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._attr_unique_id = f"{coordinator.config_entry.entry_id}_additional_items" + + @property + def todo_items(self) -> list[TodoItem]: + """Return the todo items.""" + + return [ + TodoItem( + uid=item.id, + summary=item.name, + status=( + TodoItemStatus.COMPLETED + if item.is_owned + else TodoItemStatus.NEEDS_ACTION + ), + ) + for item in self.coordinator.data.additional_items + ] + + async def async_create_todo_item(self, item: TodoItem) -> None: + """Add an item to the To-do list.""" + + try: + if TYPE_CHECKING: + assert item.summary + await self.coordinator.cookidoo.add_additional_items([item.summary]) + except CookidooException as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="todo_save_item_failed", + translation_placeholders={"name": item.summary or ""}, + ) from e + + await self.coordinator.async_refresh() + + async def async_update_todo_item(self, item: TodoItem) -> None: + """Update an item to the To-do list.""" 
+ + try: + if TYPE_CHECKING: + assert item.uid + assert item.summary + new_item = CookidooAdditionalItem( + id=item.uid, + name=item.summary, + is_owned=item.status == TodoItemStatus.COMPLETED, + ) + await self.coordinator.cookidoo.edit_additional_items_ownership([new_item]) + await self.coordinator.cookidoo.edit_additional_items([new_item]) + except CookidooException as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="todo_update_item_failed", + translation_placeholders={"name": item.summary or ""}, + ) from e + + await self.coordinator.async_refresh() + + async def async_delete_todo_items(self, uids: list[str]) -> None: + """Delete an item from the To-do list.""" + + try: + await self.coordinator.cookidoo.remove_additional_items(uids) + except CookidooException as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="todo_delete_item_failed", + translation_placeholders={"count": str(len(uids))}, + ) from e + + await self.coordinator.async_refresh() diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index b074ff714f6..930bda4e81b 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -113,6 +113,7 @@ FLOWS = { "color_extractor", "comelit", "control4", + "cookidoo", "coolmaster", "cpuspeed", "crownstone", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index fcd974534af..ecbe3f0dcbf 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -1044,6 +1044,12 @@ "config_flow": true, "iot_class": "local_polling" }, + "cookidoo": { + "name": "Cookidoo", + "integration_type": "service", + "config_flow": true, + "iot_class": "cloud_polling" + }, "coolmaster": { "name": "CoolMasterNet", "integration_type": "hub", diff --git a/mypy.ini b/mypy.ini index a0c441c44f9..2d8e0ea3f61 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1124,6 +1124,16 
@@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.cookidoo.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.counter.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index 4ee02e13695..8f4705e878e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -704,6 +704,9 @@ connect-box==0.3.1 # homeassistant.components.xiaomi_miio construct==2.10.68 +# homeassistant.components.cookidoo +cookidoo-api==0.10.0 + # homeassistant.components.backup # homeassistant.components.utility_meter cronsim==2.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f7faaa3ae0d..3a88a5a2d41 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -600,6 +600,9 @@ colorthief==0.2.1 # homeassistant.components.xiaomi_miio construct==2.10.68 +# homeassistant.components.cookidoo +cookidoo-api==0.10.0 + # homeassistant.components.backup # homeassistant.components.utility_meter cronsim==2.6 diff --git a/tests/components/cookidoo/__init__.py b/tests/components/cookidoo/__init__.py new file mode 100644 index 00000000000..043f627ecc6 --- /dev/null +++ b/tests/components/cookidoo/__init__.py @@ -0,0 +1,15 @@ +"""Tests for the Cookidoo integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, +) -> None: + """Mock setup of the cookidoo integration.""" + cookidoo_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(cookidoo_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/cookidoo/conftest.py 
b/tests/components/cookidoo/conftest.py new file mode 100644 index 00000000000..68700967d35 --- /dev/null +++ b/tests/components/cookidoo/conftest.py @@ -0,0 +1,76 @@ +"""Common fixtures for the Cookidoo tests.""" + +from collections.abc import Generator +from typing import cast +from unittest.mock import AsyncMock, patch + +from cookidoo_api import ( + CookidooAdditionalItem, + CookidooAuthResponse, + CookidooIngredientItem, +) +import pytest + +from homeassistant.components.cookidoo.const import DOMAIN +from homeassistant.const import CONF_COUNTRY, CONF_EMAIL, CONF_LANGUAGE, CONF_PASSWORD + +from tests.common import MockConfigEntry, load_json_object_fixture + +EMAIL = "test-email" +PASSWORD = "test-password" +COUNTRY = "CH" +LANGUAGE = "de-CH" + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.cookidoo.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_cookidoo_client() -> Generator[AsyncMock]: + """Mock a Cookidoo client.""" + with ( + patch( + "homeassistant.components.cookidoo.Cookidoo", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.cookidoo.config_flow.Cookidoo", + new=mock_client, + ), + ): + client = mock_client.return_value + client.login.return_value = cast(CookidooAuthResponse, {"name": "Cookidoo"}) + client.get_ingredient_items.return_value = [ + CookidooIngredientItem(**item) + for item in load_json_object_fixture("ingredient_items.json", DOMAIN)[ + "data" + ] + ] + client.get_additional_items.return_value = [ + CookidooAdditionalItem(**item) + for item in load_json_object_fixture("additional_items.json", DOMAIN)[ + "data" + ] + ] + yield client + + +@pytest.fixture(name="cookidoo_config_entry") +def mock_cookidoo_config_entry() -> MockConfigEntry: + """Mock cookidoo configuration entry.""" + return MockConfigEntry( + domain=DOMAIN, + data={ + CONF_EMAIL: EMAIL, 
+ CONF_PASSWORD: PASSWORD, + CONF_COUNTRY: COUNTRY, + CONF_LANGUAGE: LANGUAGE, + }, + entry_id="01JBVVVJ87F6G5V0QJX6HBC94T", + ) diff --git a/tests/components/cookidoo/fixtures/additional_items.json b/tests/components/cookidoo/fixtures/additional_items.json new file mode 100644 index 00000000000..97cd206f6ad --- /dev/null +++ b/tests/components/cookidoo/fixtures/additional_items.json @@ -0,0 +1,9 @@ +{ + "data": [ + { + "id": "unique_id_tomaten", + "name": "Tomaten", + "is_owned": false + } + ] +} diff --git a/tests/components/cookidoo/fixtures/ingredient_items.json b/tests/components/cookidoo/fixtures/ingredient_items.json new file mode 100644 index 00000000000..7fbeb90e91a --- /dev/null +++ b/tests/components/cookidoo/fixtures/ingredient_items.json @@ -0,0 +1,10 @@ +{ + "data": [ + { + "id": "unique_id_mehl", + "name": "Mehl", + "description": "200 g", + "is_owned": false + } + ] +} diff --git a/tests/components/cookidoo/snapshots/test_todo.ambr b/tests/components/cookidoo/snapshots/test_todo.ambr new file mode 100644 index 00000000000..965cbb0adde --- /dev/null +++ b/tests/components/cookidoo/snapshots/test_todo.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_todo[todo.cookidoo_additional_purchases-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': None, + 'entity_id': 'todo.cookidoo_additional_purchases', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Additional purchases', + 'platform': 'cookidoo', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'additional_item_list', + 'unique_id': '01JBVVVJ87F6G5V0QJX6HBC94T_additional_items', + 'unit_of_measurement': None, + }) +# 
--- +# name: test_todo[todo.cookidoo_additional_purchases-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Cookidoo Additional purchases', + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.cookidoo_additional_purchases', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_todo[todo.cookidoo_shopping_list-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': None, + 'entity_id': 'todo.cookidoo_shopping_list', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Shopping list', + 'platform': 'cookidoo', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'ingredient_list', + 'unique_id': '01JBVVVJ87F6G5V0QJX6HBC94T_ingredients', + 'unit_of_measurement': None, + }) +# --- +# name: test_todo[todo.cookidoo_shopping_list-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Cookidoo Shopping list', + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.cookidoo_shopping_list', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- diff --git a/tests/components/cookidoo/test_config_flow.py b/tests/components/cookidoo/test_config_flow.py new file mode 100644 index 00000000000..0da8afe7d07 --- /dev/null +++ b/tests/components/cookidoo/test_config_flow.py @@ -0,0 +1,182 @@ +"""Test the Cookidoo config flow.""" + +from unittest.mock import AsyncMock + +from cookidoo_api.exceptions import ( + CookidooAuthException, + CookidooException, + CookidooRequestException, +) +import pytest + +from homeassistant.components.cookidoo.const import DOMAIN +from 
homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_COUNTRY, CONF_EMAIL, CONF_LANGUAGE, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import COUNTRY, EMAIL, LANGUAGE, PASSWORD + +from tests.common import MockConfigEntry + +MOCK_DATA_USER_STEP = { + CONF_EMAIL: EMAIL, + CONF_PASSWORD: PASSWORD, + CONF_COUNTRY: COUNTRY, +} + +MOCK_DATA_LANGUAGE_STEP = { + CONF_LANGUAGE: LANGUAGE, +} + + +async def test_flow_user_success( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_cookidoo_client: AsyncMock +) -> None: + """Test we get the user flow and create entry with success.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["handler"] == "cookidoo" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_USER_STEP, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "language" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_LANGUAGE_STEP, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Cookidoo" + assert result["data"] == {**MOCK_DATA_USER_STEP, **MOCK_DATA_LANGUAGE_STEP} + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (CookidooRequestException(), "cannot_connect"), + (CookidooAuthException(), "invalid_auth"), + (CookidooException(), "unknown"), + (IndexError(), "unknown"), + ], +) +async def test_flow_user_init_data_unknown_error_and_recover_on_step_1( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + raise_error: Exception, + text_error: str, +) -> None: + """Test unknown errors.""" + mock_cookidoo_client.login.side_effect = raise_error + + 
result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_USER_STEP, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"]["base"] == text_error + + # Recover + mock_cookidoo_client.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_USER_STEP, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "language" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_LANGUAGE_STEP, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].title == "Cookidoo" + + assert result["data"] == {**MOCK_DATA_USER_STEP, **MOCK_DATA_LANGUAGE_STEP} + + +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (CookidooRequestException(), "cannot_connect"), + (CookidooAuthException(), "invalid_auth"), + (CookidooException(), "unknown"), + (IndexError(), "unknown"), + ], +) +async def test_flow_user_init_data_unknown_error_and_recover_on_step_2( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + raise_error: Exception, + text_error: str, +) -> None: + """Test unknown errors.""" + mock_cookidoo_client.get_additional_items.side_effect = raise_error + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_USER_STEP, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "language" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_LANGUAGE_STEP, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"]["base"] == text_error + + # Recover + 
mock_cookidoo_client.get_additional_items.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_LANGUAGE_STEP, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].title == "Cookidoo" + + assert result["data"] == {**MOCK_DATA_USER_STEP, **MOCK_DATA_LANGUAGE_STEP} + + +async def test_flow_user_init_data_already_configured( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + cookidoo_config_entry: MockConfigEntry, +) -> None: + """Test we abort user data set when entry is already configured.""" + + cookidoo_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "user"} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_USER_STEP, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/cookidoo/test_init.py b/tests/components/cookidoo/test_init.py new file mode 100644 index 00000000000..c73295bcd96 --- /dev/null +++ b/tests/components/cookidoo/test_init.py @@ -0,0 +1,102 @@ +"""Unit tests for the cookidoo integration.""" + +from unittest.mock import AsyncMock + +from cookidoo_api import CookidooAuthException, CookidooRequestException +import pytest + +from homeassistant.components.cookidoo.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("mock_cookidoo_client") +async def test_load_unload( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, +) -> None: + """Test loading and unloading of the config entry.""" + await setup_integration(hass, cookidoo_config_entry) + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + assert await hass.config_entries.async_unload(cookidoo_config_entry.entry_id) + assert cookidoo_config_entry.state is ConfigEntryState.NOT_LOADED + + +@pytest.mark.parametrize( + ("exception", "status"), + [ + (CookidooRequestException, ConfigEntryState.SETUP_RETRY), + (CookidooAuthException, ConfigEntryState.SETUP_RETRY), + ], +) +async def test_init_failure( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + status: ConfigEntryState, + exception: Exception, + cookidoo_config_entry: MockConfigEntry, +) -> None: + """Test an initialization error on integration load.""" + mock_cookidoo_client.login.side_effect = exception + await setup_integration(hass, cookidoo_config_entry) + assert cookidoo_config_entry.state == status + + +@pytest.mark.parametrize( + "cookidoo_method", + [ + "get_ingredient_items", + "get_additional_items", + ], +) +async def test_config_entry_not_ready( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, + cookidoo_method: str, +) -> None: + """Test config entry not ready.""" + getattr( + mock_cookidoo_client, cookidoo_method + ).side_effect = CookidooRequestException() + cookidoo_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(cookidoo_config_entry.entry_id) + await hass.async_block_till_done() + + assert cookidoo_config_entry.state is ConfigEntryState.SETUP_RETRY + + +@pytest.mark.parametrize( + ("exception", "status"), + [ + (None, ConfigEntryState.LOADED), + 
(CookidooRequestException, ConfigEntryState.SETUP_RETRY), + (CookidooAuthException, ConfigEntryState.SETUP_ERROR), + ], +) +async def test_config_entry_not_ready_auth_error( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, + exception: Exception | None, + status: ConfigEntryState, +) -> None: + """Test config entry not ready from authentication error.""" + + mock_cookidoo_client.get_ingredient_items.side_effect = CookidooAuthException + mock_cookidoo_client.refresh_token.side_effect = exception + + cookidoo_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(cookidoo_config_entry.entry_id) + await hass.async_block_till_done() + + assert cookidoo_config_entry.state is status diff --git a/tests/components/cookidoo/test_todo.py b/tests/components/cookidoo/test_todo.py new file mode 100644 index 00000000000..0e60a86d225 --- /dev/null +++ b/tests/components/cookidoo/test_todo.py @@ -0,0 +1,292 @@ +"""Test for todo platform of the Cookidoo integration.""" + +from collections.abc import Generator +import re +from unittest.mock import AsyncMock, patch + +from cookidoo_api import ( + CookidooAdditionalItem, + CookidooIngredientItem, + CookidooRequestException, +) +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.todo import ( + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoItemStatus, + TodoServices, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.fixture(autouse=True) +def todo_only() -> Generator[None]: + """Enable only the todo platform.""" + with patch( + "homeassistant.components.cookidoo.PLATFORMS", + [Platform.TODO], + ): + yield + + +@pytest.mark.usefixtures("mock_cookidoo_client") +async def test_todo( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Snapshot test states of todo platform.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform( + hass, entity_registry, snapshot, cookidoo_config_entry.entry_id + ) + + +async def test_update_ingredient( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test update ingredient item.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "unique_id_mehl", + ATTR_STATUS: TodoItemStatus.COMPLETED, + }, + target={ATTR_ENTITY_ID: "todo.cookidoo_shopping_list"}, + blocking=True, + ) + + mock_cookidoo_client.edit_ingredient_items_ownership.assert_called_once_with( + [ + CookidooIngredientItem( + id="unique_id_mehl", + name="", + description="", + is_owned=True, + ) + ], + ) + + +async def test_update_ingredient_exception( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test update ingredient with exception.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + mock_cookidoo_client.edit_ingredient_items_ownership.side_effect = ( + CookidooRequestException + ) + with pytest.raises( 
+ HomeAssistantError, match="Failed to update Mehl in Cookidoo shopping list" + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "unique_id_mehl", + ATTR_STATUS: TodoItemStatus.COMPLETED, + }, + target={ATTR_ENTITY_ID: "todo.cookidoo_shopping_list"}, + blocking=True, + ) + + +async def test_add_additional_item( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test add additional item to list.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.ADD_ITEM, + service_data={ATTR_ITEM: "Äpfel"}, + target={ATTR_ENTITY_ID: "todo.cookidoo_additional_purchases"}, + blocking=True, + ) + + mock_cookidoo_client.add_additional_items.assert_called_once_with( + ["Äpfel"], + ) + + +async def test_add_additional_item_exception( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test add additional item to list with exception.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + mock_cookidoo_client.add_additional_items.side_effect = CookidooRequestException + with pytest.raises( + HomeAssistantError, match="Failed to save Äpfel to Cookidoo shopping list" + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.ADD_ITEM, + service_data={ATTR_ITEM: "Äpfel"}, + target={ATTR_ENTITY_ID: "todo.cookidoo_additional_purchases"}, + blocking=True, + ) + + +async def test_update_additional_item( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test update additional item.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + await 
hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "unique_id_tomaten", + ATTR_RENAME: "Peperoni", + ATTR_STATUS: TodoItemStatus.COMPLETED, + }, + target={ATTR_ENTITY_ID: "todo.cookidoo_additional_purchases"}, + blocking=True, + ) + + mock_cookidoo_client.edit_additional_items_ownership.assert_called_once_with( + [ + CookidooAdditionalItem( + id="unique_id_tomaten", + name="Peperoni", + is_owned=True, + ) + ], + ) + mock_cookidoo_client.edit_additional_items.assert_called_once_with( + [ + CookidooAdditionalItem( + id="unique_id_tomaten", + name="Peperoni", + is_owned=True, + ) + ], + ) + + +async def test_update_additional_item_exception( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test update additional item with exception.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + mock_cookidoo_client.edit_additional_items_ownership.side_effect = ( + CookidooRequestException + ) + mock_cookidoo_client.edit_additional_items.side_effect = CookidooRequestException + with pytest.raises( + HomeAssistantError, match="Failed to update Peperoni in Cookidoo shopping list" + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "unique_id_tomaten", + ATTR_RENAME: "Peperoni", + ATTR_STATUS: TodoItemStatus.COMPLETED, + }, + target={ATTR_ENTITY_ID: "todo.cookidoo_additional_purchases"}, + blocking=True, + ) + + +async def test_delete_additional_items( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test delete additional item.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_ITEM, + service_data={ATTR_ITEM: 
"unique_id_tomaten"}, + target={ATTR_ENTITY_ID: "todo.cookidoo_additional_purchases"}, + blocking=True, + ) + + mock_cookidoo_client.remove_additional_items.assert_called_once_with( + ["unique_id_tomaten"] + ) + + +async def test_delete_additional_items_exception( + hass: HomeAssistant, + cookidoo_config_entry: MockConfigEntry, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test delete additional item.""" + + await setup_integration(hass, cookidoo_config_entry) + + assert cookidoo_config_entry.state is ConfigEntryState.LOADED + mock_cookidoo_client.remove_additional_items.side_effect = CookidooRequestException + with pytest.raises( + HomeAssistantError, + match=re.escape("Failed to delete 1 item(s) from Cookidoo shopping list"), + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_ITEM, + service_data={ATTR_ITEM: "unique_id_tomaten"}, + target={ATTR_ENTITY_ID: "todo.cookidoo_additional_purchases"}, + blocking=True, + ) From fd811c85e9e69d8f3399f9891d8b7ec628371353 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:28:08 +0100 Subject: [PATCH 148/677] Migrate wemo light tests to use Kelvin (#133031) --- tests/components/wemo/test_light_bridge.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/components/wemo/test_light_bridge.py b/tests/components/wemo/test_light_bridge.py index 48be2823750..4deddeaba94 100644 --- a/tests/components/wemo/test_light_bridge.py +++ b/tests/components/wemo/test_light_bridge.py @@ -11,7 +11,7 @@ from homeassistant.components.homeassistant import ( ) from homeassistant.components.light import ( ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_SUPPORTED_COLOR_MODES, DOMAIN as LIGHT_DOMAIN, ColorMode, @@ -116,7 +116,7 @@ async def test_light_update_entity( blocking=True, ) state = hass.states.get(wemo_entity.entity_id) - assert state.attributes.get(ATTR_COLOR_TEMP) == 432 + assert 
state.attributes.get(ATTR_COLOR_TEMP_KELVIN) == 2314 assert state.attributes.get(ATTR_SUPPORTED_COLOR_MODES) == [ColorMode.COLOR_TEMP] assert state.attributes.get(ATTR_COLOR_MODE) == ColorMode.COLOR_TEMP assert state.state == STATE_ON From f0391f4963adcb0a6b2bb2f5ea135af340a0892c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:28:42 +0100 Subject: [PATCH 149/677] Migrate tradfri light tests to use Kelvin (#133030) --- tests/components/tradfri/test_light.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/components/tradfri/test_light.py b/tests/components/tradfri/test_light.py index 887b043689f..c7091e77343 100644 --- a/tests/components/tradfri/test_light.py +++ b/tests/components/tradfri/test_light.py @@ -9,10 +9,10 @@ from pytradfri.device import Device from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, - ATTR_MAX_MIREDS, - ATTR_MIN_MIREDS, + ATTR_MAX_COLOR_TEMP_KELVIN, + ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_SUPPORTED_COLOR_MODES, DOMAIN as LIGHT_DOMAIN, ColorMode, @@ -67,9 +67,9 @@ def bulb_cws() -> str: "light.test_ws", { ATTR_BRIGHTNESS: 250, - ATTR_COLOR_TEMP: 400, - ATTR_MIN_MIREDS: 250, - ATTR_MAX_MIREDS: 454, + ATTR_COLOR_TEMP_KELVIN: 2500, + ATTR_MAX_COLOR_TEMP_KELVIN: 4000, + ATTR_MIN_COLOR_TEMP_KELVIN: 2202, ATTR_SUPPORTED_COLOR_MODES: [ColorMode.COLOR_TEMP], ATTR_COLOR_MODE: ColorMode.COLOR_TEMP, }, From de35bfce77dfe1a2c76dd4a0d2bc2a5d53e2aefb Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:29:15 +0100 Subject: [PATCH 150/677] Migrate yeelight light tests to use Kelvin (#133033) --- tests/components/yeelight/test_light.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/tests/components/yeelight/test_light.py b/tests/components/yeelight/test_light.py index 
f4ff82e7757..274d0a158f0 100644 --- a/tests/components/yeelight/test_light.py +++ b/tests/components/yeelight/test_light.py @@ -24,7 +24,7 @@ from yeelight.main import _MODEL_SPECS from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_BRIGHTNESS_PCT, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, @@ -107,7 +107,6 @@ from homeassistant.util.color import ( color_RGB_to_hs, color_RGB_to_xy, color_temperature_kelvin_to_mired, - color_temperature_mired_to_kelvin, ) from . import ( @@ -289,7 +288,7 @@ async def test_services(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - # turn_on color_temp brightness = 100 - color_temp = 200 + color_temp = 5000 transition = 1 mocked_bulb.last_properties["power"] = "off" await hass.services.async_call( @@ -298,7 +297,7 @@ async def test_services(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - { ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_BRIGHTNESS: brightness, - ATTR_COLOR_TEMP: color_temp, + ATTR_COLOR_TEMP_KELVIN: color_temp, ATTR_FLASH: FLASH_LONG, ATTR_EFFECT: EFFECT_STOP, ATTR_TRANSITION: transition, @@ -316,7 +315,7 @@ async def test_services(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - brightness / 255 * 100, duration=transition * 1000, light_type=LightType.Main ) mocked_bulb.async_set_color_temp.assert_called_once_with( - color_temperature_mired_to_kelvin(color_temp), + color_temp, duration=transition * 1000, light_type=LightType.Main, ) @@ -327,7 +326,7 @@ async def test_services(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - # turn_on color_temp - flash short brightness = 100 - color_temp = 200 + color_temp = 5000 transition = 1 mocked_bulb.async_start_music.reset_mock() mocked_bulb.async_set_brightness.reset_mock() @@ -342,7 +341,7 @@ async def test_services(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - { ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_BRIGHTNESS: brightness, - ATTR_COLOR_TEMP: color_temp, + ATTR_COLOR_TEMP_KELVIN: color_temp, 
ATTR_FLASH: FLASH_SHORT, ATTR_EFFECT: EFFECT_STOP, ATTR_TRANSITION: transition, @@ -360,7 +359,7 @@ async def test_services(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) - brightness / 255 * 100, duration=transition * 1000, light_type=LightType.Main ) mocked_bulb.async_set_color_temp.assert_called_once_with( - color_temperature_mired_to_kelvin(color_temp), + color_temp, duration=transition * 1000, light_type=LightType.Main, ) @@ -691,7 +690,7 @@ async def test_state_already_set_avoid_ratelimit(hass: HomeAssistant) -> None: await hass.services.async_call( "light", SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_COLOR_TEMP: 250}, + {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_COLOR_TEMP_KELVIN: 4000}, blocking=True, ) assert mocked_bulb.async_set_hsv.mock_calls == [] @@ -707,7 +706,7 @@ async def test_state_already_set_avoid_ratelimit(hass: HomeAssistant) -> None: await hass.services.async_call( "light", SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_COLOR_TEMP: 250}, + {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_COLOR_TEMP_KELVIN: 4000}, blocking=True, ) assert mocked_bulb.async_set_hsv.mock_calls == [] @@ -720,7 +719,7 @@ async def test_state_already_set_avoid_ratelimit(hass: HomeAssistant) -> None: await hass.services.async_call( "light", SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_COLOR_TEMP: 250}, + {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_COLOR_TEMP_KELVIN: 4000}, blocking=True, ) assert mocked_bulb.async_set_hsv.mock_calls == [] From e276f8ee896b422701ff8ac13c9f1c6cd040882e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:32:39 +0100 Subject: [PATCH 151/677] Migrate zwave_js light tests to use Kelvin (#133034) --- tests/components/zwave_js/test_light.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/tests/components/zwave_js/test_light.py b/tests/components/zwave_js/test_light.py index 4c725c6dc29..21a6c0a8fae 100644 --- 
a/tests/components/zwave_js/test_light.py +++ b/tests/components/zwave_js/test_light.py @@ -7,10 +7,10 @@ from zwave_js_server.event import Event from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, - ATTR_MAX_MIREDS, - ATTR_MIN_MIREDS, + ATTR_MAX_COLOR_TEMP_KELVIN, + ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_SUPPORTED_COLOR_MODES, @@ -51,8 +51,8 @@ async def test_light( assert state assert state.state == STATE_OFF - assert state.attributes[ATTR_MIN_MIREDS] == 153 - assert state.attributes[ATTR_MAX_MIREDS] == 370 + assert state.attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 6500 + assert state.attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 2700 assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "hs"] @@ -130,7 +130,7 @@ async def test_light( assert state.state == STATE_ON assert state.attributes[ATTR_COLOR_MODE] == "color_temp" assert state.attributes[ATTR_BRIGHTNESS] == 255 - assert state.attributes[ATTR_COLOR_TEMP] == 370 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 2702 assert state.attributes[ATTR_RGB_COLOR] is not None # Test turning on with same brightness @@ -256,7 +256,7 @@ async def test_light( assert state.attributes[ATTR_COLOR_MODE] == "hs" assert state.attributes[ATTR_BRIGHTNESS] == 255 assert state.attributes[ATTR_RGB_COLOR] == (255, 76, 255) - assert state.attributes[ATTR_COLOR_TEMP] is None + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] is None client.async_send_command.reset_mock() @@ -293,7 +293,7 @@ async def test_light( await hass.services.async_call( "light", "turn_on", - {"entity_id": BULB_6_MULTI_COLOR_LIGHT_ENTITY, ATTR_COLOR_TEMP: 170}, + {"entity_id": BULB_6_MULTI_COLOR_LIGHT_ENTITY, ATTR_COLOR_TEMP_KELVIN: 5881}, blocking=True, ) @@ -358,14 +358,14 @@ async def test_light( assert state.state == STATE_ON assert 
state.attributes[ATTR_COLOR_MODE] == "color_temp" assert state.attributes[ATTR_BRIGHTNESS] == 255 - assert state.attributes[ATTR_COLOR_TEMP] == 170 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 5881 assert ATTR_RGB_COLOR in state.attributes # Test turning on with same color temp await hass.services.async_call( "light", "turn_on", - {"entity_id": BULB_6_MULTI_COLOR_LIGHT_ENTITY, ATTR_COLOR_TEMP: 170}, + {"entity_id": BULB_6_MULTI_COLOR_LIGHT_ENTITY, ATTR_COLOR_TEMP_KELVIN: 5881}, blocking=True, ) @@ -379,7 +379,7 @@ async def test_light( "turn_on", { "entity_id": BULB_6_MULTI_COLOR_LIGHT_ENTITY, - ATTR_COLOR_TEMP: 170, + ATTR_COLOR_TEMP_KELVIN: 5881, ATTR_TRANSITION: 35, }, blocking=True, From 483688dba2f93d2bbc263db13a5a5a74f7a86aac Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 20:32:59 +0100 Subject: [PATCH 152/677] Promote Twente Milieu quality scale to silver (#133074) --- .../components/twentemilieu/manifest.json | 1 + .../twentemilieu/quality_scale.yaml | 19 ++++++++----------- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/twentemilieu/manifest.json b/homeassistant/components/twentemilieu/manifest.json index 292887c6c5b..c04c5492a40 100644 --- a/homeassistant/components/twentemilieu/manifest.json +++ b/homeassistant/components/twentemilieu/manifest.json @@ -7,5 +7,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["twentemilieu"], + "quality_scale": "silver", "requirements": ["twentemilieu==2.2.0"] } diff --git a/homeassistant/components/twentemilieu/quality_scale.yaml b/homeassistant/components/twentemilieu/quality_scale.yaml index 3d7535a249c..42ff152cb4d 100644 --- a/homeassistant/components/twentemilieu/quality_scale.yaml +++ b/homeassistant/components/twentemilieu/quality_scale.yaml @@ -14,12 +14,9 @@ rules: status: exempt comment: | This integration does not provide additional actions. 
- docs-high-level-description: - status: todo - comment: | - The introduction can be improved and is missing links to the provider. + docs-high-level-description: done docs-installation-instructions: done - docs-removal-instructions: todo + docs-removal-instructions: done entity-event-setup: status: exempt comment: | @@ -51,7 +48,7 @@ rules: data), there is no need to implement parallel updates. test-coverage: done integration-owner: done - docs-installation-parameters: todo + docs-installation-parameters: done docs-configuration-parameters: status: exempt comment: | @@ -95,16 +92,16 @@ rules: status: exempt comment: | This integration doesn't have any cases where raising an issue is needed. - docs-use-cases: todo + docs-use-cases: done docs-supported-devices: status: exempt comment: | This is an service, which doesn't integrate with any devices. docs-supported-functions: done - docs-data-update: todo - docs-known-limitations: todo - docs-troubleshooting: todo - docs-examples: todo + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: done + docs-examples: done # Platinum async-dependency: done From 7c9992f5d34ca0931be1ce610bfa77adf5ffcd0f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:37:32 +0100 Subject: [PATCH 153/677] Migrate demo light tests to use Kelvin (#133003) --- tests/components/demo/test_light.py | 27 +++++++++++++++++---------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/tests/components/demo/test_light.py b/tests/components/demo/test_light.py index 8fcdb8a9c2e..b39b09d9307 100644 --- a/tests/components/demo/test_light.py +++ b/tests/components/demo/test_light.py @@ -9,11 +9,10 @@ from homeassistant.components.demo import DOMAIN from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_BRIGHTNESS_PCT, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, - ATTR_KELVIN, - ATTR_MAX_MIREDS, - ATTR_MIN_MIREDS, + ATTR_MAX_COLOR_TEMP_KELVIN, 
+ ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_XY_COLOR, DOMAIN as LIGHT_DOMAIN, @@ -79,25 +78,33 @@ async def test_state_attributes(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_EFFECT: "none", ATTR_COLOR_TEMP: 400}, + { + ATTR_ENTITY_ID: ENTITY_LIGHT, + ATTR_EFFECT: "none", + ATTR_COLOR_TEMP_KELVIN: 2500, + }, blocking=True, ) state = hass.states.get(ENTITY_LIGHT) - assert state.attributes.get(ATTR_COLOR_TEMP) == 400 - assert state.attributes.get(ATTR_MIN_MIREDS) == 153 - assert state.attributes.get(ATTR_MAX_MIREDS) == 500 + assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) == 2500 + assert state.attributes.get(ATTR_MAX_COLOR_TEMP_KELVIN) == 6535 + assert state.attributes.get(ATTR_MIN_COLOR_TEMP_KELVIN) == 2000 assert state.attributes.get(ATTR_EFFECT) == "none" await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ENTITY_LIGHT, ATTR_BRIGHTNESS_PCT: 50, ATTR_KELVIN: 3000}, + { + ATTR_ENTITY_ID: ENTITY_LIGHT, + ATTR_BRIGHTNESS_PCT: 50, + ATTR_COLOR_TEMP_KELVIN: 3000, + }, blocking=True, ) state = hass.states.get(ENTITY_LIGHT) - assert state.attributes.get(ATTR_COLOR_TEMP) == 333 + assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) == 3000 assert state.attributes.get(ATTR_BRIGHTNESS) == 128 From 708084d3005d935f64f64886ba81cf773a25bac0 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:38:13 +0100 Subject: [PATCH 154/677] Migrate switch_as_x light tests to use Kelvin (#133023) --- tests/components/switch_as_x/test_light.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/components/switch_as_x/test_light.py b/tests/components/switch_as_x/test_light.py index 5e48b7db965..5f724a2d7e7 100644 --- a/tests/components/switch_as_x/test_light.py +++ b/tests/components/switch_as_x/test_light.py @@ -3,7 +3,7 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, 
ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, ATTR_HS_COLOR, @@ -57,7 +57,7 @@ async def test_default_state(hass: HomeAssistant) -> None: assert state.attributes["supported_features"] == 0 assert state.attributes.get(ATTR_BRIGHTNESS) is None assert state.attributes.get(ATTR_HS_COLOR) is None - assert state.attributes.get(ATTR_COLOR_TEMP) is None + assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) is None assert state.attributes.get(ATTR_EFFECT_LIST) is None assert state.attributes.get(ATTR_EFFECT) is None assert state.attributes.get(ATTR_SUPPORTED_COLOR_MODES) == [ColorMode.ONOFF] From b189bc6146b2930231eda5d67afa1519ebd22173 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:38:49 +0100 Subject: [PATCH 155/677] Migrate smartthings light tests to use Kelvin (#133022) --- tests/components/smartthings/test_light.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/components/smartthings/test_light.py b/tests/components/smartthings/test_light.py index 22b181a3645..b46188b5b5f 100644 --- a/tests/components/smartthings/test_light.py +++ b/tests/components/smartthings/test_light.py @@ -9,7 +9,7 @@ import pytest from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_SUPPORTED_COLOR_MODES, ATTR_TRANSITION, @@ -101,8 +101,8 @@ async def test_entity_state(hass: HomeAssistant, light_devices) -> None: assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION assert state.attributes[ATTR_BRIGHTNESS] == 255 assert ATTR_HS_COLOR not in state.attributes[ATTR_HS_COLOR] - assert isinstance(state.attributes[ATTR_COLOR_TEMP], int) - assert state.attributes[ATTR_COLOR_TEMP] == 222 + assert isinstance(state.attributes[ATTR_COLOR_TEMP_KELVIN], int) + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 4500 async def test_entity_and_device_attributes( @@ -273,7 
+273,7 @@ async def test_turn_on_with_color_temp(hass: HomeAssistant, light_devices) -> No await hass.services.async_call( "light", "turn_on", - {ATTR_ENTITY_ID: "light.color_dimmer_2", ATTR_COLOR_TEMP: 300}, + {ATTR_ENTITY_ID: "light.color_dimmer_2", ATTR_COLOR_TEMP_KELVIN: 3333}, blocking=True, ) # This test schedules and update right after the call @@ -282,7 +282,7 @@ async def test_turn_on_with_color_temp(hass: HomeAssistant, light_devices) -> No state = hass.states.get("light.color_dimmer_2") assert state is not None assert state.state == "on" - assert state.attributes[ATTR_COLOR_TEMP] == 300 + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 3333 async def test_update_from_signal(hass: HomeAssistant, device_factory) -> None: From 3baa432bae94ddb635f0bd357ce3ace8596c2ea0 Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Thu, 12 Dec 2024 20:48:01 +0100 Subject: [PATCH 156/677] Use runtime_data in velbus (#132988) --- homeassistant/components/velbus/__init__.py | 36 +++++++++++-------- .../components/velbus/binary_sensor.py | 11 +++--- homeassistant/components/velbus/button.py | 13 +++---- homeassistant/components/velbus/climate.py | 12 ++++--- homeassistant/components/velbus/cover.py | 13 +++---- .../components/velbus/diagnostics.py | 11 +++--- homeassistant/components/velbus/light.py | 16 +++++---- .../components/velbus/quality_scale.yaml | 2 +- homeassistant/components/velbus/select.py | 13 +++---- homeassistant/components/velbus/sensor.py | 10 +++--- homeassistant/components/velbus/services.py | 32 ++++++++++++----- homeassistant/components/velbus/switch.py | 13 +++---- 12 files changed, 104 insertions(+), 78 deletions(-) diff --git a/homeassistant/components/velbus/__init__.py b/homeassistant/components/velbus/__init__.py index fec6395c890..f8426bc4130 100644 --- a/homeassistant/components/velbus/__init__.py +++ b/homeassistant/components/velbus/__init__.py @@ -2,6 +2,8 @@ from __future__ import annotations +import asyncio +from dataclasses import 
dataclass import logging import os import shutil @@ -34,6 +36,16 @@ PLATFORMS = [ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) +type VelbusConfigEntry = ConfigEntry[VelbusData] + + +@dataclass +class VelbusData: + """Runtime data for the Velbus config entry.""" + + controller: Velbus + connect_task: asyncio.Task + async def velbus_connect_task( controller: Velbus, hass: HomeAssistant, entry_id: str @@ -67,19 +79,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: VelbusConfigEntry) -> bool: """Establish connection with velbus.""" - hass.data.setdefault(DOMAIN, {}) - controller = Velbus( entry.data[CONF_PORT], cache_dir=hass.config.path(STORAGE_DIR, f"velbuscache-{entry.entry_id}"), ) - hass.data[DOMAIN][entry.entry_id] = {} - hass.data[DOMAIN][entry.entry_id]["cntrl"] = controller - hass.data[DOMAIN][entry.entry_id]["tsk"] = hass.async_create_task( - velbus_connect_task(controller, hass, entry.entry_id) - ) + task = hass.async_create_task(velbus_connect_task(controller, hass, entry.entry_id)) + entry.runtime_data = VelbusData(controller=controller, connect_task=task) _migrate_device_identifiers(hass, entry.entry_id) @@ -88,17 +95,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: VelbusConfigEntry) -> bool: """Unload (close) the velbus connection.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - await hass.data[DOMAIN][entry.entry_id]["cntrl"].stop() - hass.data[DOMAIN].pop(entry.entry_id) - if not hass.data[DOMAIN]: - hass.data.pop(DOMAIN) + await entry.runtime_data.controller.stop() return unload_ok -async def async_remove_entry(hass: HomeAssistant, entry: 
ConfigEntry) -> None: +async def async_remove_entry(hass: HomeAssistant, entry: VelbusConfigEntry) -> None: """Remove the velbus entry, so we also have to cleanup the cache dir.""" await hass.async_add_executor_job( shutil.rmtree, @@ -106,7 +110,9 @@ async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: ) -async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_migrate_entry( + hass: HomeAssistant, config_entry: VelbusConfigEntry +) -> bool: """Migrate old entry.""" _LOGGER.debug("Migrating from version %s", config_entry.version) cache_path = hass.config.path(STORAGE_DIR, f"velbuscache-{config_entry.entry_id}/") diff --git a/homeassistant/components/velbus/binary_sensor.py b/homeassistant/components/velbus/binary_sensor.py index 5f363c1a035..dd65ff7d50d 100644 --- a/homeassistant/components/velbus/binary_sensor.py +++ b/homeassistant/components/velbus/binary_sensor.py @@ -3,24 +3,23 @@ from velbusaio.channels import Button as VelbusButton from homeassistant.components.binary_sensor import BinarySensorEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . 
import VelbusConfigEntry from .entity import VelbusEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] + await entry.runtime_data.connect_task async_add_entities( - VelbusBinarySensor(channel) for channel in cntrl.get_all("binary_sensor") + VelbusBinarySensor(channel) + for channel in entry.runtime_data.controller.get_all_binary_sensor() ) diff --git a/homeassistant/components/velbus/button.py b/homeassistant/components/velbus/button.py index bd5b81d67a0..2b908c188b8 100644 --- a/homeassistant/components/velbus/button.py +++ b/homeassistant/components/velbus/button.py @@ -8,24 +8,25 @@ from velbusaio.channels import ( ) from homeassistant.components.button import ButtonEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . 
import VelbusConfigEntry from .entity import VelbusEntity, api_call async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] - async_add_entities(VelbusButton(channel) for channel in cntrl.get_all("button")) + await entry.runtime_data.connect_task + async_add_entities( + VelbusButton(channel) + for channel in entry.runtime_data.controller.get_all_button() + ) class VelbusButton(VelbusEntity, ButtonEntity): diff --git a/homeassistant/components/velbus/climate.py b/homeassistant/components/velbus/climate.py index 18142482539..fa8391d4199 100644 --- a/homeassistant/components/velbus/climate.py +++ b/homeassistant/components/velbus/climate.py @@ -11,25 +11,27 @@ from homeassistant.components.climate import ( ClimateEntityFeature, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import VelbusConfigEntry from .const import DOMAIN, PRESET_MODES from .entity import VelbusEntity, api_call async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] - async_add_entities(VelbusClimate(channel) for channel in cntrl.get_all("climate")) + await entry.runtime_data.connect_task + async_add_entities( + VelbusClimate(channel) + for channel in entry.runtime_data.controller.get_all_climate() + ) class VelbusClimate(VelbusEntity, ClimateEntity): diff --git a/homeassistant/components/velbus/cover.py b/homeassistant/components/velbus/cover.py index 8b9d927f3d7..7850e7b1895 100644 --- a/homeassistant/components/velbus/cover.py +++ b/homeassistant/components/velbus/cover.py @@ -11,23 +11,24 @@ from homeassistant.components.cover import ( CoverEntity, CoverEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . 
import VelbusConfigEntry from .entity import VelbusEntity, api_call async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] - async_add_entities(VelbusCover(channel) for channel in cntrl.get_all("cover")) + await entry.runtime_data.connect_task + async_add_entities( + VelbusCover(channel) + for channel in entry.runtime_data.controller.get_all_cover() + ) class VelbusCover(VelbusEntity, CoverEntity): diff --git a/homeassistant/components/velbus/diagnostics.py b/homeassistant/components/velbus/diagnostics.py index f7e29e2f57e..75b7669edec 100644 --- a/homeassistant/components/velbus/diagnostics.py +++ b/homeassistant/components/velbus/diagnostics.py @@ -7,18 +7,17 @@ from typing import Any from velbusaio.channels import Channel as VelbusChannel from velbusaio.module import Module as VelbusModule -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntry -from .const import DOMAIN +from . 
import VelbusConfigEntry async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: VelbusConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - controller = hass.data[DOMAIN][entry.entry_id]["cntrl"] + controller = entry.runtime_data.controller data: dict[str, Any] = {"entry": entry.as_dict(), "modules": []} for module in controller.get_modules().values(): data["modules"].append(_build_module_diagnostics_info(module)) @@ -26,10 +25,10 @@ async def async_get_config_entry_diagnostics( async def async_get_device_diagnostics( - hass: HomeAssistant, entry: ConfigEntry, device: DeviceEntry + hass: HomeAssistant, entry: VelbusConfigEntry, device: DeviceEntry ) -> dict[str, Any]: """Return diagnostics for a device entry.""" - controller = hass.data[DOMAIN][entry.entry_id]["cntrl"] + controller = entry.runtime_data.controller channel = list(next(iter(device.identifiers)))[1] modules = controller.get_modules() return _build_module_diagnostics_info(modules[int(channel)]) diff --git a/homeassistant/components/velbus/light.py b/homeassistant/components/velbus/light.py index 7145576be6a..0df4f70d753 100644 --- a/homeassistant/components/velbus/light.py +++ b/homeassistant/components/velbus/light.py @@ -20,28 +20,30 @@ from homeassistant.components.light import ( LightEntity, LightEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . 
import VelbusConfigEntry from .entity import VelbusEntity, api_call async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] + await entry.runtime_data.connect_task entities: list[Entity] = [ - VelbusLight(channel) for channel in cntrl.get_all("light") + VelbusLight(channel) + for channel in entry.runtime_data.controller.get_all_light() ] - entities.extend(VelbusButtonLight(channel) for channel in cntrl.get_all("led")) + entities.extend( + VelbusButtonLight(channel) + for channel in entry.runtime_data.controller.get_all_led() + ) async_add_entities(entities) diff --git a/homeassistant/components/velbus/quality_scale.yaml b/homeassistant/components/velbus/quality_scale.yaml index adea896a1c6..68fe5ead781 100644 --- a/homeassistant/components/velbus/quality_scale.yaml +++ b/homeassistant/components/velbus/quality_scale.yaml @@ -23,7 +23,7 @@ rules: entity-event-setup: todo entity-unique-id: done has-entity-name: todo - runtime-data: todo + runtime-data: done test-before-configure: done test-before-setup: todo unique-config-entry: diff --git a/homeassistant/components/velbus/select.py b/homeassistant/components/velbus/select.py index 7eecb85fc47..f0ad509270c 100644 --- a/homeassistant/components/velbus/select.py +++ b/homeassistant/components/velbus/select.py @@ -3,24 +3,25 @@ from velbusaio.channels import SelectedProgram from homeassistant.components.select import SelectEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . 
import VelbusConfigEntry from .entity import VelbusEntity, api_call async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus select based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] - async_add_entities(VelbusSelect(channel) for channel in cntrl.get_all("select")) + await entry.runtime_data.connect_task + async_add_entities( + VelbusSelect(channel) + for channel in entry.runtime_data.controller.get_all_select() + ) class VelbusSelect(VelbusEntity, SelectEntity): diff --git a/homeassistant/components/velbus/sensor.py b/homeassistant/components/velbus/sensor.py index b765eebcddc..598287839c1 100644 --- a/homeassistant/components/velbus/sensor.py +++ b/homeassistant/components/velbus/sensor.py @@ -9,24 +9,22 @@ from homeassistant.components.sensor import ( SensorEntity, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . 
import VelbusConfigEntry from .entity import VelbusEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] + await entry.runtime_data.connect_task entities = [] - for channel in cntrl.get_all("sensor"): + for channel in entry.runtime_data.controller.get_all_sensor(): entities.append(VelbusSensor(channel)) if channel.is_counter_channel(): entities.append(VelbusSensor(channel, True)) diff --git a/homeassistant/components/velbus/services.py b/homeassistant/components/velbus/services.py index 83633eb66bc..3f0b1bd6cdb 100644 --- a/homeassistant/components/velbus/services.py +++ b/homeassistant/components/velbus/services.py @@ -5,6 +5,7 @@ from __future__ import annotations from contextlib import suppress import os import shutil +from typing import TYPE_CHECKING import voluptuous as vol @@ -13,6 +14,9 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import config_validation as cv from homeassistant.helpers.storage import STORAGE_DIR +if TYPE_CHECKING: + from . 
import VelbusConfigEntry + from .const import ( CONF_INTERFACE, CONF_MEMO_TEXT, @@ -35,20 +39,32 @@ def setup_services(hass: HomeAssistant) -> None: "The interface provided is not defined as a port in a Velbus integration" ) + def get_config_entry(interface: str) -> VelbusConfigEntry | None: + for config_entry in hass.config_entries.async_entries(DOMAIN): + if "port" in config_entry.data and config_entry.data["port"] == interface: + return config_entry + return None + async def scan(call: ServiceCall) -> None: - await hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"].scan() + """Handle a scan service call.""" + entry = get_config_entry(call.data[CONF_INTERFACE]) + if entry: + await entry.runtime_data.controller.scan() async def syn_clock(call: ServiceCall) -> None: - await hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"].sync_clock() + """Handle a sync clock service call.""" + entry = get_config_entry(call.data[CONF_INTERFACE]) + if entry: + await entry.runtime_data.controller.sync_clock() async def set_memo_text(call: ServiceCall) -> None: """Handle Memo Text service call.""" - memo_text = call.data[CONF_MEMO_TEXT] - await ( - hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"] - .get_module(call.data[CONF_ADDRESS]) - .set_memo_text(memo_text.async_render()) - ) + entry = get_config_entry(call.data[CONF_INTERFACE]) + if entry: + memo_text = call.data[CONF_MEMO_TEXT] + module = entry.runtime_data.controller.get_module(call.data[CONF_ADDRESS]) + if module: + await module.set_memo_text(memo_text.async_render()) async def clear_cache(call: ServiceCall) -> None: """Handle a clear cache service call.""" diff --git a/homeassistant/components/velbus/switch.py b/homeassistant/components/velbus/switch.py index 1e6014b8d90..f3bd009d25e 100644 --- a/homeassistant/components/velbus/switch.py +++ b/homeassistant/components/velbus/switch.py @@ -5,23 +5,24 @@ from typing import Any from velbusaio.channels import Relay as VelbusRelay from homeassistant.components.switch 
import SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import VelbusConfigEntry from .entity import VelbusEntity, api_call async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VelbusConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await hass.data[DOMAIN][entry.entry_id]["tsk"] - cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"] - async_add_entities(VelbusSwitch(channel) for channel in cntrl.get_all("switch")) + await entry.runtime_data.connect_task + async_add_entities( + VelbusSwitch(channel) + for channel in entry.runtime_data.controller.get_all_switch() + ) class VelbusSwitch(VelbusEntity, SwitchEntity): From 839f06b2dc1f39ec9785888645c8a262723f4f7b Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 21:12:11 +0100 Subject: [PATCH 157/677] Small improvements to the AdGuard tests (#133073) --- tests/components/adguard/__init__.py | 2 +- tests/components/adguard/test_config_flow.py | 87 ++++++++++---------- 2 files changed, 46 insertions(+), 43 deletions(-) diff --git a/tests/components/adguard/__init__.py b/tests/components/adguard/__init__.py index 318e881ef2f..4d8ae091dc5 100644 --- a/tests/components/adguard/__init__.py +++ b/tests/components/adguard/__init__.py @@ -1 +1 @@ -"""Tests for the AdGuard Home component.""" +"""Tests for the AdGuard Home integration.""" diff --git a/tests/components/adguard/test_config_flow.py b/tests/components/adguard/test_config_flow.py index 6644a4ca20f..bd0f1b0a08f 100644 --- a/tests/components/adguard/test_config_flow.py +++ b/tests/components/adguard/test_config_flow.py @@ -59,9 +59,9 @@ async def test_connection_error( ) assert result - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" - assert 
result.get("errors") == {"base": "cannot_connect"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "cannot_connect"} async def test_full_flow_implementation( @@ -83,25 +83,27 @@ async def test_full_flow_implementation( ) assert result - assert result.get("flow_id") - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" + assert result["flow_id"] + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=FIXTURE_USER_INPUT ) - assert result2 - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("title") == FIXTURE_USER_INPUT[CONF_HOST] + assert result + assert result["type"] is FlowResultType.CREATE_ENTRY - data = result2.get("data") - assert data - assert data[CONF_HOST] == FIXTURE_USER_INPUT[CONF_HOST] - assert data[CONF_PASSWORD] == FIXTURE_USER_INPUT[CONF_PASSWORD] - assert data[CONF_PORT] == FIXTURE_USER_INPUT[CONF_PORT] - assert data[CONF_SSL] == FIXTURE_USER_INPUT[CONF_SSL] - assert data[CONF_USERNAME] == FIXTURE_USER_INPUT[CONF_USERNAME] - assert data[CONF_VERIFY_SSL] == FIXTURE_USER_INPUT[CONF_VERIFY_SSL] + config_entry = result["result"] + assert config_entry.title == FIXTURE_USER_INPUT[CONF_HOST] + assert config_entry.data == { + CONF_HOST: FIXTURE_USER_INPUT[CONF_HOST], + CONF_PASSWORD: FIXTURE_USER_INPUT[CONF_PASSWORD], + CONF_PORT: FIXTURE_USER_INPUT[CONF_PORT], + CONF_SSL: FIXTURE_USER_INPUT[CONF_SSL], + CONF_USERNAME: FIXTURE_USER_INPUT[CONF_USERNAME], + CONF_VERIFY_SSL: FIXTURE_USER_INPUT[CONF_VERIFY_SSL], + } + assert not config_entry.options async def test_integration_already_exists(hass: HomeAssistant) -> None: @@ -116,8 +118,8 @@ async def test_integration_already_exists(hass: HomeAssistant) -> None: context={"source": 
config_entries.SOURCE_USER}, ) assert result - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" async def test_hassio_already_configured(hass: HomeAssistant) -> None: @@ -141,8 +143,8 @@ async def test_hassio_already_configured(hass: HomeAssistant) -> None: context={"source": config_entries.SOURCE_HASSIO}, ) assert result - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" async def test_hassio_ignored(hass: HomeAssistant) -> None: @@ -166,8 +168,8 @@ async def test_hassio_ignored(hass: HomeAssistant) -> None: context={"source": config_entries.SOURCE_HASSIO}, ) assert result - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" async def test_hassio_confirm( @@ -195,24 +197,25 @@ async def test_hassio_confirm( context={"source": config_entries.SOURCE_HASSIO}, ) assert result - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "hassio_confirm" - assert result.get("description_placeholders") == {"addon": "AdGuard Home Addon"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "hassio_confirm" + assert result["description_placeholders"] == {"addon": "AdGuard Home Addon"} - result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) - assert result2 - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("title") == "AdGuard Home Addon" + assert result + assert result["type"] is FlowResultType.CREATE_ENTRY - data = result2.get("data") - 
assert data - assert data[CONF_HOST] == "mock-adguard" - assert data[CONF_PASSWORD] is None - assert data[CONF_PORT] == 3000 - assert data[CONF_SSL] is False - assert data[CONF_USERNAME] is None - assert data[CONF_VERIFY_SSL] + config_entry = result["result"] + assert config_entry.title == "AdGuard Home Addon" + assert config_entry.data == { + CONF_HOST: "mock-adguard", + CONF_PASSWORD: None, + CONF_PORT: 3000, + CONF_SSL: False, + CONF_USERNAME: None, + CONF_VERIFY_SSL: True, + } async def test_hassio_connection_error( @@ -241,6 +244,6 @@ async def test_hassio_connection_error( result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) assert result - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "hassio_confirm" - assert result.get("errors") == {"base": "cannot_connect"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "hassio_confirm" + assert result["errors"] == {"base": "cannot_connect"} From d79dc8d22f73346ee406b95be32cc266cc686283 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Thu, 12 Dec 2024 15:13:37 -0500 Subject: [PATCH 158/677] Add source zone exclusion to Russound RIO (#130392) * Add source zone exclusion to Russound RIO * Ruff format --- .../components/russound_rio/media_player.py | 15 ++++++++++++++- tests/components/russound_rio/conftest.py | 4 +++- tests/components/russound_rio/const.py | 1 + 3 files changed, 18 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/russound_rio/media_player.py b/homeassistant/components/russound_rio/media_player.py index d0d8e02a282..299a6fb2cea 100644 --- a/homeassistant/components/russound_rio/media_player.py +++ b/homeassistant/components/russound_rio/media_player.py @@ -5,8 +5,10 @@ from __future__ import annotations import logging from aiorussound import Controller +from aiorussound.const import FeatureFlag from aiorussound.models import PlayStatus, Source from 
aiorussound.rio import ZoneControlSurface +from aiorussound.util import is_feature_supported from homeassistant.components.media_player import ( MediaPlayerDeviceClass, @@ -155,7 +157,18 @@ class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): @property def source_list(self) -> list[str]: """Return a list of available input sources.""" - return [x.name for x in self._sources.values()] + available_sources = ( + [ + source + for source_id, source in self._sources.items() + if source_id in self._zone.enabled_sources + ] + if is_feature_supported( + self._client.rio_version, FeatureFlag.SUPPORT_ZONE_SOURCE_EXCLUSION + ) + else self._sources.values() + ) + return [x.name for x in available_sources] @property def media_title(self) -> str | None: diff --git a/tests/components/russound_rio/conftest.py b/tests/components/russound_rio/conftest.py index deb7bfccdf0..5522c1e6ea2 100644 --- a/tests/components/russound_rio/conftest.py +++ b/tests/components/russound_rio/conftest.py @@ -11,7 +11,7 @@ import pytest from homeassistant.components.russound_rio.const import DOMAIN from homeassistant.core import HomeAssistant -from .const import HARDWARE_MAC, HOST, MOCK_CONFIG, MODEL, PORT +from .const import API_VERSION, HARDWARE_MAC, HOST, MOCK_CONFIG, MODEL, PORT from tests.common import MockConfigEntry, load_json_object_fixture @@ -71,4 +71,6 @@ def mock_russound_client() -> Generator[AsyncMock]: client.connection_handler = RussoundTcpConnectionHandler(HOST, PORT) client.is_connected = Mock(return_value=True) client.unregister_state_update_callbacks.return_value = True + client.rio_version = API_VERSION + yield client diff --git a/tests/components/russound_rio/const.py b/tests/components/russound_rio/const.py index 3d2924693d2..8f8ae7b59ea 100644 --- a/tests/components/russound_rio/const.py +++ b/tests/components/russound_rio/const.py @@ -8,6 +8,7 @@ HOST = "127.0.0.1" PORT = 9621 MODEL = "MCA-C5" HARDWARE_MAC = "00:11:22:33:44:55" +API_VERSION = "1.08.00" MOCK_CONFIG = 
{ "host": HOST, From b9a7307df854b0b5beda88d26892195a7355deeb Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 21:17:05 +0100 Subject: [PATCH 159/677] Refactor light reproduce state to use kelvin attribute (#132854) --- .../components/light/reproduce_state.py | 21 ++++++-- .../components/light/test_reproduce_state.py | 48 ++++++++++++------- 2 files changed, 50 insertions(+), 19 deletions(-) diff --git a/homeassistant/components/light/reproduce_state.py b/homeassistant/components/light/reproduce_state.py index c933b517ccc..a89209eb426 100644 --- a/homeassistant/components/light/reproduce_state.py +++ b/homeassistant/components/light/reproduce_state.py @@ -15,11 +15,13 @@ from homeassistant.const import ( STATE_ON, ) from homeassistant.core import Context, HomeAssistant, State +from homeassistant.util import color as color_util from . import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_RGB_COLOR, @@ -40,6 +42,7 @@ ATTR_GROUP = [ATTR_BRIGHTNESS, ATTR_EFFECT] COLOR_GROUP = [ ATTR_HS_COLOR, ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -55,7 +58,7 @@ class ColorModeAttr(NamedTuple): COLOR_MODE_TO_ATTRIBUTE = { - ColorMode.COLOR_TEMP: ColorModeAttr(ATTR_COLOR_TEMP, ATTR_COLOR_TEMP), + ColorMode.COLOR_TEMP: ColorModeAttr(ATTR_COLOR_TEMP_KELVIN, ATTR_COLOR_TEMP_KELVIN), ColorMode.HS: ColorModeAttr(ATTR_HS_COLOR, ATTR_HS_COLOR), ColorMode.RGB: ColorModeAttr(ATTR_RGB_COLOR, ATTR_RGB_COLOR), ColorMode.RGBW: ColorModeAttr(ATTR_RGBW_COLOR, ATTR_RGBW_COLOR), @@ -124,13 +127,25 @@ async def _async_reproduce_state( color_mode = state.attributes[ATTR_COLOR_MODE] if cm_attr := COLOR_MODE_TO_ATTRIBUTE.get(color_mode): if (cm_attr_state := state.attributes.get(cm_attr.state_attr)) is None: + if ( + color_mode != ColorMode.COLOR_TEMP + or (mireds := state.attributes.get(ATTR_COLOR_TEMP)) is None + ): + 
_LOGGER.warning( + "Color mode %s specified but attribute %s missing for: %s", + color_mode, + cm_attr.state_attr, + state.entity_id, + ) + return _LOGGER.warning( - "Color mode %s specified but attribute %s missing for: %s", + "Color mode %s specified but attribute %s missing for: %s, " + "using color_temp (mireds) as fallback", color_mode, cm_attr.state_attr, state.entity_id, ) - return + cm_attr_state = color_util.color_temperature_mired_to_kelvin(mireds) service_data[cm_attr.parameter] = cm_attr_state else: # Fall back to Choosing the first color that is specified diff --git a/tests/components/light/test_reproduce_state.py b/tests/components/light/test_reproduce_state.py index 30a5e3f6842..987e97c6eb2 100644 --- a/tests/components/light/test_reproduce_state.py +++ b/tests/components/light/test_reproduce_state.py @@ -10,7 +10,7 @@ from tests.common import async_mock_service VALID_BRIGHTNESS = {"brightness": 180} VALID_EFFECT = {"effect": "random"} -VALID_COLOR_TEMP = {"color_temp": 240} +VALID_COLOR_TEMP_KELVIN = {"color_temp_kelvin": 4200} VALID_HS_COLOR = {"hs_color": (345, 75)} VALID_RGB_COLOR = {"rgb_color": (255, 63, 111)} VALID_RGBW_COLOR = {"rgbw_color": (255, 63, 111, 10)} @@ -19,7 +19,7 @@ VALID_XY_COLOR = {"xy_color": (0.59, 0.274)} NONE_BRIGHTNESS = {"brightness": None} NONE_EFFECT = {"effect": None} -NONE_COLOR_TEMP = {"color_temp": None} +NONE_COLOR_TEMP_KELVIN = {"color_temp_kelvin": None} NONE_HS_COLOR = {"hs_color": None} NONE_RGB_COLOR = {"rgb_color": None} NONE_RGBW_COLOR = {"rgbw_color": None} @@ -34,7 +34,7 @@ async def test_reproducing_states( hass.states.async_set("light.entity_off", "off", {}) hass.states.async_set("light.entity_bright", "on", VALID_BRIGHTNESS) hass.states.async_set("light.entity_effect", "on", VALID_EFFECT) - hass.states.async_set("light.entity_temp", "on", VALID_COLOR_TEMP) + hass.states.async_set("light.entity_temp", "on", VALID_COLOR_TEMP_KELVIN) hass.states.async_set("light.entity_hs", "on", VALID_HS_COLOR) 
hass.states.async_set("light.entity_rgb", "on", VALID_RGB_COLOR) hass.states.async_set("light.entity_xy", "on", VALID_XY_COLOR) @@ -49,7 +49,7 @@ async def test_reproducing_states( State("light.entity_off", "off"), State("light.entity_bright", "on", VALID_BRIGHTNESS), State("light.entity_effect", "on", VALID_EFFECT), - State("light.entity_temp", "on", VALID_COLOR_TEMP), + State("light.entity_temp", "on", VALID_COLOR_TEMP_KELVIN), State("light.entity_hs", "on", VALID_HS_COLOR), State("light.entity_rgb", "on", VALID_RGB_COLOR), State("light.entity_xy", "on", VALID_XY_COLOR), @@ -73,7 +73,7 @@ async def test_reproducing_states( State("light.entity_xy", "off"), State("light.entity_off", "on", VALID_BRIGHTNESS), State("light.entity_bright", "on", VALID_EFFECT), - State("light.entity_effect", "on", VALID_COLOR_TEMP), + State("light.entity_effect", "on", VALID_COLOR_TEMP_KELVIN), State("light.entity_temp", "on", VALID_HS_COLOR), State("light.entity_hs", "on", VALID_RGB_COLOR), State("light.entity_rgb", "on", VALID_XY_COLOR), @@ -92,7 +92,7 @@ async def test_reproducing_states( expected_bright["entity_id"] = "light.entity_bright" expected_calls.append(expected_bright) - expected_effect = dict(VALID_COLOR_TEMP) + expected_effect = dict(VALID_COLOR_TEMP_KELVIN) expected_effect["entity_id"] = "light.entity_effect" expected_calls.append(expected_effect) @@ -146,7 +146,7 @@ async def test_filter_color_modes( """Test filtering of parameters according to color mode.""" hass.states.async_set("light.entity", "off", {}) all_colors = { - **VALID_COLOR_TEMP, + **VALID_COLOR_TEMP_KELVIN, **VALID_HS_COLOR, **VALID_RGB_COLOR, **VALID_RGBW_COLOR, @@ -162,7 +162,7 @@ async def test_filter_color_modes( ) expected_map = { - light.ColorMode.COLOR_TEMP: {**VALID_BRIGHTNESS, **VALID_COLOR_TEMP}, + light.ColorMode.COLOR_TEMP: {**VALID_BRIGHTNESS, **VALID_COLOR_TEMP_KELVIN}, light.ColorMode.BRIGHTNESS: VALID_BRIGHTNESS, light.ColorMode.HS: {**VALID_BRIGHTNESS, **VALID_HS_COLOR}, 
light.ColorMode.ONOFF: {**VALID_BRIGHTNESS}, @@ -201,13 +201,14 @@ async def test_filter_color_modes_missing_attributes( hass.states.async_set("light.entity", "off", {}) expected_log = ( "Color mode color_temp specified " - "but attribute color_temp missing for: light.entity" + "but attribute color_temp_kelvin missing for: light.entity" ) + expected_fallback_log = "using color_temp (mireds) as fallback" turn_on_calls = async_mock_service(hass, "light", "turn_on") all_colors = { - **VALID_COLOR_TEMP, + **VALID_COLOR_TEMP_KELVIN, **VALID_HS_COLOR, **VALID_RGB_COLOR, **VALID_RGBW_COLOR, @@ -216,9 +217,9 @@ async def test_filter_color_modes_missing_attributes( **VALID_BRIGHTNESS, } - # Test missing `color_temp` attribute + # Test missing `color_temp_kelvin` attribute stored_attributes = {**all_colors} - stored_attributes.pop("color_temp") + stored_attributes.pop("color_temp_kelvin") caplog.clear() await async_reproduce_state( hass, @@ -226,11 +227,25 @@ async def test_filter_color_modes_missing_attributes( ) assert len(turn_on_calls) == 0 assert expected_log in caplog.text + assert expected_fallback_log not in caplog.text - # Test with correct `color_temp` attribute - stored_attributes["color_temp"] = 240 - expected = {"brightness": 180, "color_temp": 240} + # Test with deprecated `color_temp` attribute + stored_attributes["color_temp"] = 250 + expected = {"brightness": 180, "color_temp_kelvin": 4000} caplog.clear() + await async_reproduce_state( + hass, + [State("light.entity", "on", {**stored_attributes, "color_mode": color_mode})], + ) + + assert len(turn_on_calls) == 1 + assert expected_log in caplog.text + assert expected_fallback_log in caplog.text + + # Test with correct `color_temp_kelvin` attribute + expected = {"brightness": 180, "color_temp_kelvin": 4200} + caplog.clear() + turn_on_calls.clear() await async_reproduce_state( hass, [State("light.entity", "on", {**all_colors, "color_mode": color_mode})], @@ -239,6 +254,7 @@ async def 
test_filter_color_modes_missing_attributes( assert turn_on_calls[0].domain == "light" assert dict(turn_on_calls[0].data) == {"entity_id": "light.entity", **expected} assert expected_log not in caplog.text + assert expected_fallback_log not in caplog.text @pytest.mark.parametrize( @@ -246,7 +262,7 @@ async def test_filter_color_modes_missing_attributes( [ NONE_BRIGHTNESS, NONE_EFFECT, - NONE_COLOR_TEMP, + NONE_COLOR_TEMP_KELVIN, NONE_HS_COLOR, NONE_RGB_COLOR, NONE_RGBW_COLOR, From d02bceb6f32282267a710867ef0529996601585b Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 21:17:31 +0100 Subject: [PATCH 160/677] Migrate alexa color_temp handlers to use Kelvin (#132995) --- homeassistant/components/alexa/handlers.py | 16 ++++++++-------- tests/components/alexa/test_capabilities.py | 20 ++++++++++++++------ 2 files changed, 22 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/alexa/handlers.py b/homeassistant/components/alexa/handlers.py index 9b857ff4dfd..04bef105546 100644 --- a/homeassistant/components/alexa/handlers.py +++ b/homeassistant/components/alexa/handlers.py @@ -376,14 +376,14 @@ async def async_api_decrease_color_temp( ) -> AlexaResponse: """Process a decrease color temperature request.""" entity = directive.entity - current = int(entity.attributes[light.ATTR_COLOR_TEMP]) - max_mireds = int(entity.attributes[light.ATTR_MAX_MIREDS]) + current = int(entity.attributes[light.ATTR_COLOR_TEMP_KELVIN]) + min_kelvin = int(entity.attributes[light.ATTR_MIN_COLOR_TEMP_KELVIN]) - value = min(max_mireds, current + 50) + value = max(min_kelvin, current - 500) await hass.services.async_call( entity.domain, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP: value}, + {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP_KELVIN: value}, blocking=False, context=context, ) @@ -400,14 +400,14 @@ async def async_api_increase_color_temp( ) -> AlexaResponse: """Process an increase 
color temperature request.""" entity = directive.entity - current = int(entity.attributes[light.ATTR_COLOR_TEMP]) - min_mireds = int(entity.attributes[light.ATTR_MIN_MIREDS]) + current = int(entity.attributes[light.ATTR_COLOR_TEMP_KELVIN]) + max_kelvin = int(entity.attributes[light.ATTR_MAX_COLOR_TEMP_KELVIN]) - value = max(min_mireds, current - 50) + value = min(max_kelvin, current + 500) await hass.services.async_call( entity.domain, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP: value}, + {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP_KELVIN: value}, blocking=False, context=context, ) diff --git a/tests/components/alexa/test_capabilities.py b/tests/components/alexa/test_capabilities.py index 823afd515b2..b10a93df0c9 100644 --- a/tests/components/alexa/test_capabilities.py +++ b/tests/components/alexa/test_capabilities.py @@ -163,7 +163,7 @@ async def test_api_set_color_temperature(hass: HomeAssistant) -> None: assert msg["header"]["name"] == "Response" -@pytest.mark.parametrize(("result", "initial"), [(383, "333"), (500, "500")]) +@pytest.mark.parametrize(("result", "initial"), [(2500, "3000"), (2000, "2000")]) async def test_api_decrease_color_temp( hass: HomeAssistant, result: int, initial: str ) -> None: @@ -176,7 +176,11 @@ async def test_api_decrease_color_temp( hass.states.async_set( "light.test", "off", - {"friendly_name": "Test light", "color_temp": initial, "max_mireds": 500}, + { + "friendly_name": "Test light", + "color_temp_kelvin": initial, + "min_color_temp_kelvin": 2000, + }, ) call_light = async_mock_service(hass, "light", "turn_on") @@ -189,11 +193,11 @@ async def test_api_decrease_color_temp( assert len(call_light) == 1 assert call_light[0].data["entity_id"] == "light.test" - assert call_light[0].data["color_temp"] == result + assert call_light[0].data["color_temp_kelvin"] == result assert msg["header"]["name"] == "Response" -@pytest.mark.parametrize(("result", "initial"), [(283, "333"), (142, "142")]) 
+@pytest.mark.parametrize(("result", "initial"), [(3500, "3000"), (7000, "7000")]) async def test_api_increase_color_temp( hass: HomeAssistant, result: int, initial: str ) -> None: @@ -206,7 +210,11 @@ async def test_api_increase_color_temp( hass.states.async_set( "light.test", "off", - {"friendly_name": "Test light", "color_temp": initial, "min_mireds": 142}, + { + "friendly_name": "Test light", + "color_temp_kelvin": initial, + "max_color_temp_kelvin": 7000, + }, ) call_light = async_mock_service(hass, "light", "turn_on") @@ -219,7 +227,7 @@ async def test_api_increase_color_temp( assert len(call_light) == 1 assert call_light[0].data["entity_id"] == "light.test" - assert call_light[0].data["color_temp"] == result + assert call_light[0].data["color_temp_kelvin"] == result assert msg["header"]["name"] == "Response" From aa7e02485301b788d0c58d30ae1333132049703c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 21:17:52 +0100 Subject: [PATCH 161/677] Migrate lifx light tests to use Kelvin (#133020) --- tests/components/lifx/test_light.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tests/components/lifx/test_light.py b/tests/components/lifx/test_light.py index 88c2115ce47..ffe819fa2cb 100644 --- a/tests/components/lifx/test_light.py +++ b/tests/components/lifx/test_light.py @@ -9,7 +9,7 @@ import pytest from homeassistant.components import lifx from homeassistant.components.lifx import DOMAIN -from homeassistant.components.lifx.const import ATTR_POWER +from homeassistant.components.lifx.const import _ATTR_COLOR_TEMP, ATTR_POWER from homeassistant.components.lifx.light import ATTR_INFRARED, ATTR_ZONES from homeassistant.components.lifx.manager import ( ATTR_CLOUD_SATURATION_MAX, @@ -31,7 +31,6 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS_PCT, ATTR_COLOR_MODE, ATTR_COLOR_NAME, - ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, @@ -1263,7 +1262,7 @@ 
async def test_white_bulb(hass: HomeAssistant) -> None: await hass.services.async_call( LIGHT_DOMAIN, "turn_on", - {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP: 400}, + {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 2500}, blocking=True, ) assert bulb.set_color.calls[0][0][0] == [32000, 0, 32000, 2500] @@ -1759,7 +1758,7 @@ async def test_lifx_set_state_kelvin(hass: HomeAssistant) -> None: await hass.services.async_call( DOMAIN, "set_state", - {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 255, ATTR_COLOR_TEMP: 400}, + {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 255, _ATTR_COLOR_TEMP: 400}, blocking=True, ) assert bulb.set_color.calls[0][0][0] == [32000, 0, 65535, 2500] From 61b1b50c342018b847125316ac19d0b6a6d5a1b0 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 21:19:05 +0100 Subject: [PATCH 162/677] Improve Solar.Forecast configuration flow tests (#133077) --- .../forecast_solar/test_config_flow.py | 111 +++++++++++------- 1 file changed, 71 insertions(+), 40 deletions(-) diff --git a/tests/components/forecast_solar/test_config_flow.py b/tests/components/forecast_solar/test_config_flow.py index abaad402e1b..8fffb5096bc 100644 --- a/tests/components/forecast_solar/test_config_flow.py +++ b/tests/components/forecast_solar/test_config_flow.py @@ -2,6 +2,8 @@ from unittest.mock import AsyncMock +import pytest + from homeassistant.components.forecast_solar.const import ( CONF_AZIMUTH, CONF_DAMPING_EVENING, @@ -25,10 +27,10 @@ async def test_user_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> No DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ CONF_NAME: "Name", @@ -40,13 +42,16 @@ async def test_user_flow(hass: 
HomeAssistant, mock_setup_entry: AsyncMock) -> No }, ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("title") == "Name" - assert result2.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Name" + assert config_entry.unique_id is None + assert config_entry.data == { CONF_LATITUDE: 52.42, CONF_LONGITUDE: 4.42, } - assert result2.get("options") == { + assert config_entry.options == { CONF_AZIMUTH: 142, CONF_DECLINATION: 42, CONF_MODULES_POWER: 4242, @@ -55,9 +60,9 @@ async def test_user_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> No assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.usefixtures("mock_setup_entry") async def test_options_flow_invalid_api( hass: HomeAssistant, - mock_setup_entry: AsyncMock, mock_config_entry: MockConfigEntry, ) -> None: """Test options config flow when API key is invalid.""" @@ -67,10 +72,10 @@ async def test_options_flow_invalid_api( result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "init" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" - result2 = await hass.config_entries.options.async_configure( + result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ CONF_API_KEY: "solarPOWER!", @@ -84,27 +89,11 @@ async def test_options_flow_invalid_api( ) await hass.async_block_till_done() - assert result2.get("type") is FlowResultType.FORM - assert result2["errors"] == {CONF_API_KEY: "invalid_api_key"} + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {CONF_API_KEY: "invalid_api_key"} - -async def test_options_flow( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test config flow options.""" - mock_config_entry.add_to_hass(hass) - 
await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) - - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "init" - - # With the API key - result2 = await hass.config_entries.options.async_configure( + # Ensure we can recover from this error + result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ CONF_API_KEY: "SolarForecast150", @@ -118,8 +107,8 @@ async def test_options_flow( ) await hass.async_block_till_done() - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { CONF_API_KEY: "SolarForecast150", CONF_DECLINATION: 21, CONF_AZIMUTH: 22, @@ -130,9 +119,9 @@ async def test_options_flow( } -async def test_options_flow_without_key( +@pytest.mark.usefixtures("mock_setup_entry") +async def test_options_flow( hass: HomeAssistant, - mock_setup_entry: AsyncMock, mock_config_entry: MockConfigEntry, ) -> None: """Test config flow options.""" @@ -142,11 +131,53 @@ async def test_options_flow_without_key( result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "init" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + # With the API key + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_API_KEY: "SolarForecast150", + CONF_DECLINATION: 21, + CONF_AZIMUTH: 22, + CONF_MODULES_POWER: 2122, + CONF_DAMPING_MORNING: 0.25, + CONF_DAMPING_EVENING: 0.25, + CONF_INVERTER_SIZE: 2000, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_API_KEY: "SolarForecast150", + 
CONF_DECLINATION: 21, + CONF_AZIMUTH: 22, + CONF_MODULES_POWER: 2122, + CONF_DAMPING_MORNING: 0.25, + CONF_DAMPING_EVENING: 0.25, + CONF_INVERTER_SIZE: 2000, + } + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_options_flow_without_key( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test config flow options.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" # Without the API key - result2 = await hass.config_entries.options.async_configure( + result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ CONF_DECLINATION: 21, @@ -159,8 +190,8 @@ async def test_options_flow_without_key( ) await hass.async_block_till_done() - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { CONF_API_KEY: None, CONF_DECLINATION: 21, CONF_AZIMUTH: 22, From 2cff7526d01e985c9b9035dced9a662a092cded9 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 22:15:49 +0100 Subject: [PATCH 163/677] Add test-before-setup rule to quality_scale validation (#132255) * Add test-before-setup rule to quality_scale validation * Use ast_parse_module * Add rules_done * Add Config argument --- script/hassfest/quality_scale.py | 3 +- .../test_before_setup.py | 69 +++++++++++++++++++ 2 files changed, 71 insertions(+), 1 deletion(-) create mode 100644 script/hassfest/quality_scale_validation/test_before_setup.py diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 784573f5f8f..f3b285c8485 100644 --- a/script/hassfest/quality_scale.py +++ 
b/script/hassfest/quality_scale.py @@ -23,6 +23,7 @@ from .quality_scale_validation import ( reconfiguration_flow, runtime_data, strict_typing, + test_before_setup, unique_config_entry, ) @@ -56,7 +57,7 @@ ALL_RULES = [ Rule("has-entity-name", ScaledQualityScaleTiers.BRONZE), Rule("runtime-data", ScaledQualityScaleTiers.BRONZE, runtime_data), Rule("test-before-configure", ScaledQualityScaleTiers.BRONZE), - Rule("test-before-setup", ScaledQualityScaleTiers.BRONZE), + Rule("test-before-setup", ScaledQualityScaleTiers.BRONZE, test_before_setup), Rule("unique-config-entry", ScaledQualityScaleTiers.BRONZE, unique_config_entry), # SILVER Rule("action-exceptions", ScaledQualityScaleTiers.SILVER), diff --git a/script/hassfest/quality_scale_validation/test_before_setup.py b/script/hassfest/quality_scale_validation/test_before_setup.py new file mode 100644 index 00000000000..db737c99e37 --- /dev/null +++ b/script/hassfest/quality_scale_validation/test_before_setup.py @@ -0,0 +1,69 @@ +"""Enforce that the integration raises correctly during initialisation. 
+ +https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/test-before-setup/ +""" + +import ast + +from script.hassfest import ast_parse_module +from script.hassfest.model import Config, Integration + +_VALID_EXCEPTIONS = { + "ConfigEntryNotReady", + "ConfigEntryAuthFailed", + "ConfigEntryError", +} + + +def _raises_exception(async_setup_entry_function: ast.AsyncFunctionDef) -> bool: + """Check that a valid exception is raised within `async_setup_entry`.""" + for node in ast.walk(async_setup_entry_function): + if isinstance(node, ast.Raise): + if isinstance(node.exc, ast.Name) and node.exc.id in _VALID_EXCEPTIONS: + return True + if isinstance(node.exc, ast.Call) and node.exc.func.id in _VALID_EXCEPTIONS: + return True + + return False + + +def _calls_first_refresh(async_setup_entry_function: ast.AsyncFunctionDef) -> bool: + """Check that a async_config_entry_first_refresh within `async_setup_entry`.""" + for node in ast.walk(async_setup_entry_function): + if ( + isinstance(node, ast.Call) + and isinstance(node.func, ast.Attribute) + and node.func.attr == "async_config_entry_first_refresh" + ): + return True + + return False + + +def _get_setup_entry_function(module: ast.Module) -> ast.AsyncFunctionDef | None: + """Get async_setup_entry function.""" + for item in module.body: + if isinstance(item, ast.AsyncFunctionDef) and item.name == "async_setup_entry": + return item + return None + + +def validate( + config: Config, integration: Integration, *, rules_done: set[str] +) -> list[str] | None: + """Validate correct use of ConfigEntry.runtime_data.""" + init_file = integration.path / "__init__.py" + init = ast_parse_module(init_file) + + # Should not happen, but better to be safe + if not (async_setup_entry := _get_setup_entry_function(init)): + return [f"Could not find `async_setup_entry` in {init_file}"] + + if not ( + _raises_exception(async_setup_entry) or _calls_first_refresh(async_setup_entry) + ): + return [ + f"Integration does not 
raise one of {_VALID_EXCEPTIONS} " + f"in async_setup_entry ({init_file})" + ] + return None From bf9788b9c4724b46a0289342d6122477df2d883e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 12 Dec 2024 22:16:28 +0100 Subject: [PATCH 164/677] Fix CI failure in russound_rio (#133081) * Fix CI in russound_rio * Adjust --- homeassistant/components/russound_rio/media_player.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/homeassistant/components/russound_rio/media_player.py b/homeassistant/components/russound_rio/media_player.py index 299a6fb2cea..02467731ec3 100644 --- a/homeassistant/components/russound_rio/media_player.py +++ b/homeassistant/components/russound_rio/media_player.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +from typing import TYPE_CHECKING from aiorussound import Controller from aiorussound.const import FeatureFlag @@ -157,6 +158,8 @@ class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): @property def source_list(self) -> list[str]: """Return a list of available input sources.""" + if TYPE_CHECKING: + assert self._client.rio_version available_sources = ( [ source From 2af5c5ecda516bb2adf774140622a3d52ea11146 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Thu, 12 Dec 2024 20:26:30 -0800 Subject: [PATCH 165/677] Update Rainbird quality scale grading on the Silver quality checks (#131498) * Grade Rainbird on the Silver quality scale * Remove done comments * Update quality_scale.yaml * Update config-flow-test-coverage --- .../components/rainbird/quality_scale.yaml | 30 ++++++++++++------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/rainbird/quality_scale.yaml b/homeassistant/components/rainbird/quality_scale.yaml index cd000c63fad..8b4805a9b0e 100644 --- a/homeassistant/components/rainbird/quality_scale.yaml +++ b/homeassistant/components/rainbird/quality_scale.yaml @@ -34,21 +34,31 @@ rules: docs-removal-instructions: todo 
test-before-setup: done docs-high-level-description: done - config-flow-test-coverage: done + config-flow-test-coverage: + status: todo + comment: | + All config flow tests should finish with CREATE_ENTRY and ABORT to + test they are able to recover from errors docs-actions: done runtime-data: done # Silver - log-when-unavailable: todo - config-entry-unloading: todo + log-when-unavailable: done + config-entry-unloading: done reauthentication-flow: done - action-exceptions: todo - docs-installation-parameters: todo - integration-owner: todo - parallel-updates: todo - test-coverage: todo - docs-configuration-parameters: todo - entity-unavailable: todo + action-exceptions: done + docs-installation-parameters: + status: todo + comment: The documentation does not mention installation parameters + integration-owner: done + parallel-updates: + status: todo + comment: The integration does not explicitly set a number of parallel updates. + test-coverage: done + docs-configuration-parameters: + status: todo + comment: The documentation for configuration parameters could be improved. 
+ entity-unavailable: done # Gold docs-examples: todo From 72cc1f4d39b2bc844d9e2572f9789c4edd8335d9 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 06:51:55 +0100 Subject: [PATCH 166/677] Use correct ATTR_KELVIN constant in yeelight tests (#133088) --- tests/components/yeelight/test_light.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/components/yeelight/test_light.py b/tests/components/yeelight/test_light.py index 274d0a158f0..56162d4d9d1 100644 --- a/tests/components/yeelight/test_light.py +++ b/tests/components/yeelight/test_light.py @@ -28,7 +28,6 @@ from homeassistant.components.light import ( ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR, - ATTR_KELVIN, ATTR_RGB_COLOR, ATTR_TRANSITION, FLASH_LONG, @@ -59,6 +58,7 @@ from homeassistant.components.yeelight.const import ( YEELIGHT_TEMPERATURE_TRANSACTION, ) from homeassistant.components.yeelight.light import ( + ATTR_KELVIN, ATTR_MINUTES, ATTR_MODE, EFFECT_CANDLE_FLICKER, From 09b06f839d7a154dcaed298eb360a839f915d2eb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 13 Dec 2024 07:47:40 +0100 Subject: [PATCH 167/677] Bump github/codeql-action from 3.27.7 to 3.27.9 (#133104) --- .github/workflows/codeql.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 8f6e393f853..d3efa8ebaa3 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.2.2 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.27.7 + uses: github/codeql-action/init@v3.27.9 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.27.7 + uses: github/codeql-action/analyze@v3.27.9 with: category: "/language:python" From 0ffb588d5cdaeceba4c18a2ac5af42c4c0848348 Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: 
Fri, 13 Dec 2024 07:53:25 +0100 Subject: [PATCH 168/677] Move config entry type of energyzero integration (#133094) Move config_entry type to coordinator file --- homeassistant/components/energyzero/__init__.py | 7 ++----- homeassistant/components/energyzero/coordinator.py | 5 ++++- homeassistant/components/energyzero/diagnostics.py | 3 +-- homeassistant/components/energyzero/sensor.py | 7 +++++-- homeassistant/components/energyzero/services.py | 7 ++----- 5 files changed, 14 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/energyzero/__init__.py b/homeassistant/components/energyzero/__init__.py index f7591056383..fc2855374dd 100644 --- a/homeassistant/components/energyzero/__init__.py +++ b/homeassistant/components/energyzero/__init__.py @@ -2,7 +2,6 @@ from __future__ import annotations -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady @@ -10,14 +9,12 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType from .const import DOMAIN -from .coordinator import EnergyZeroDataUpdateCoordinator +from .coordinator import EnergyZeroConfigEntry, EnergyZeroDataUpdateCoordinator from .services import async_setup_services PLATFORMS: list[Platform] = [Platform.SENSOR] CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) -type EnergyZeroConfigEntry = ConfigEntry[EnergyZeroDataUpdateCoordinator] - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up EnergyZero services.""" @@ -30,7 +27,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: EnergyZeroConfigEntry) -> bool: """Set up EnergyZero from a config entry.""" - coordinator = EnergyZeroDataUpdateCoordinator(hass) + coordinator = EnergyZeroDataUpdateCoordinator(hass, entry) try: await 
coordinator.async_config_entry_first_refresh() except ConfigEntryNotReady: diff --git a/homeassistant/components/energyzero/coordinator.py b/homeassistant/components/energyzero/coordinator.py index 65955b2ebe6..35054f7b3b7 100644 --- a/homeassistant/components/energyzero/coordinator.py +++ b/homeassistant/components/energyzero/coordinator.py @@ -21,6 +21,8 @@ from homeassistant.util import dt as dt_util from .const import DOMAIN, LOGGER, SCAN_INTERVAL, THRESHOLD_HOUR +type EnergyZeroConfigEntry = ConfigEntry[EnergyZeroDataUpdateCoordinator] + class EnergyZeroData(NamedTuple): """Class for defining data in dict.""" @@ -35,13 +37,14 @@ class EnergyZeroDataUpdateCoordinator(DataUpdateCoordinator[EnergyZeroData]): config_entry: ConfigEntry - def __init__(self, hass: HomeAssistant) -> None: + def __init__(self, hass: HomeAssistant, entry: EnergyZeroConfigEntry) -> None: """Initialize global EnergyZero data updater.""" super().__init__( hass, LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL, + config_entry=entry, ) self.energyzero = EnergyZero(session=async_get_clientsession(hass)) diff --git a/homeassistant/components/energyzero/diagnostics.py b/homeassistant/components/energyzero/diagnostics.py index e6116eac259..0a45d87fee5 100644 --- a/homeassistant/components/energyzero/diagnostics.py +++ b/homeassistant/components/energyzero/diagnostics.py @@ -7,8 +7,7 @@ from typing import Any from homeassistant.core import HomeAssistant -from . 
import EnergyZeroConfigEntry -from .coordinator import EnergyZeroData +from .coordinator import EnergyZeroConfigEntry, EnergyZeroData def get_gas_price(data: EnergyZeroData, hours: int) -> float | None: diff --git a/homeassistant/components/energyzero/sensor.py b/homeassistant/components/energyzero/sensor.py index d52da599966..141ac793fba 100644 --- a/homeassistant/components/energyzero/sensor.py +++ b/homeassistant/components/energyzero/sensor.py @@ -25,9 +25,12 @@ from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import EnergyZeroConfigEntry from .const import DOMAIN, SERVICE_TYPE_DEVICE_NAMES -from .coordinator import EnergyZeroData, EnergyZeroDataUpdateCoordinator +from .coordinator import ( + EnergyZeroConfigEntry, + EnergyZeroData, + EnergyZeroDataUpdateCoordinator, +) @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/energyzero/services.py b/homeassistant/components/energyzero/services.py index ba2bbf0573f..286735895ad 100644 --- a/homeassistant/components/energyzero/services.py +++ b/homeassistant/components/energyzero/services.py @@ -5,7 +5,7 @@ from __future__ import annotations from datetime import date, datetime from enum import Enum from functools import partial -from typing import TYPE_CHECKING, Final +from typing import Final from energyzero import Electricity, Gas, VatOption import voluptuous as vol @@ -22,11 +22,8 @@ from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import selector from homeassistant.util import dt as dt_util -if TYPE_CHECKING: - from . 
import EnergyZeroConfigEntry - from .const import DOMAIN -from .coordinator import EnergyZeroDataUpdateCoordinator +from .coordinator import EnergyZeroConfigEntry, EnergyZeroDataUpdateCoordinator ATTR_CONFIG_ENTRY: Final = "config_entry" ATTR_START: Final = "start" From 263eb41e799d73915ee979b14fa6464872473ea1 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 08:24:18 +0100 Subject: [PATCH 169/677] Remove unused constant from blink (#133109) --- homeassistant/components/blink/services.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/homeassistant/components/blink/services.py b/homeassistant/components/blink/services.py index 5f51598e721..dd5d1e37627 100644 --- a/homeassistant/components/blink/services.py +++ b/homeassistant/components/blink/services.py @@ -5,7 +5,7 @@ from __future__ import annotations import voluptuous as vol from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_DEVICE_ID, CONF_PIN +from homeassistant.const import CONF_PIN from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv @@ -13,11 +13,6 @@ from homeassistant.helpers import config_validation as cv from .const import ATTR_CONFIG_ENTRY_ID, DOMAIN, SERVICE_SEND_PIN from .coordinator import BlinkConfigEntry -SERVICE_UPDATE_SCHEMA = vol.Schema( - { - vol.Required(ATTR_DEVICE_ID): vol.All(cv.ensure_list, [cv.string]), - } -) SERVICE_SEND_PIN_SCHEMA = vol.Schema( { vol.Required(ATTR_CONFIG_ENTRY_ID): vol.All(cv.ensure_list, [cv.string]), From 8bd2c183e280d14643ce5b56bd0de44191a921b8 Mon Sep 17 00:00:00 2001 From: Brandon Rothweiler <2292715+bdr99@users.noreply.github.com> Date: Fri, 13 Dec 2024 02:46:15 -0500 Subject: [PATCH 170/677] Bump py-aosmith to 1.0.12 (#133100) --- homeassistant/components/aosmith/manifest.json | 2 +- 
requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/aosmith/manifest.json b/homeassistant/components/aosmith/manifest.json index eae7981d5b9..a928a6677cb 100644 --- a/homeassistant/components/aosmith/manifest.json +++ b/homeassistant/components/aosmith/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/aosmith", "iot_class": "cloud_polling", - "requirements": ["py-aosmith==1.0.11"] + "requirements": ["py-aosmith==1.0.12"] } diff --git a/requirements_all.txt b/requirements_all.txt index 8f4705e878e..17998ba7fef 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1677,7 +1677,7 @@ pushover_complete==1.1.1 pvo==2.2.0 # homeassistant.components.aosmith -py-aosmith==1.0.11 +py-aosmith==1.0.12 # homeassistant.components.canary py-canary==0.5.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3a88a5a2d41..3965fbc0a3a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1378,7 +1378,7 @@ pushover_complete==1.1.1 pvo==2.2.0 # homeassistant.components.aosmith -py-aosmith==1.0.11 +py-aosmith==1.0.12 # homeassistant.components.canary py-canary==0.5.4 From de89be05129b9fe00f561f29179d12bc5bd8b400 Mon Sep 17 00:00:00 2001 From: David Bonnes Date: Fri, 13 Dec 2024 07:54:14 +0000 Subject: [PATCH 171/677] Bugfix to use evohome's new hostname (#133085) --- homeassistant/components/evohome/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/evohome/manifest.json b/homeassistant/components/evohome/manifest.json index da3d197f6aa..22edadad7f4 100644 --- a/homeassistant/components/evohome/manifest.json +++ b/homeassistant/components/evohome/manifest.json @@ -6,5 +6,5 @@ "iot_class": "cloud_polling", "loggers": ["evohomeasync", "evohomeasync2"], 
"quality_scale": "legacy", - "requirements": ["evohome-async==0.4.20"] + "requirements": ["evohome-async==0.4.21"] } diff --git a/requirements_all.txt b/requirements_all.txt index 17998ba7fef..4f61b88ed00 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -879,7 +879,7 @@ eufylife-ble-client==0.1.8 # evdev==1.6.1 # homeassistant.components.evohome -evohome-async==0.4.20 +evohome-async==0.4.21 # homeassistant.components.bryant_evolution evolutionhttp==0.0.18 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3965fbc0a3a..06448688306 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -745,7 +745,7 @@ eternalegypt==0.0.16 eufylife-ble-client==0.1.8 # homeassistant.components.evohome -evohome-async==0.4.20 +evohome-async==0.4.21 # homeassistant.components.bryant_evolution evolutionhttp==0.0.18 From 53439d6e2a31dcea27727613f4e06660973ffb05 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Fri, 13 Dec 2024 08:55:44 +0100 Subject: [PATCH 172/677] Handle step size correctly in myuplink number platform (#133016) --- homeassistant/components/myuplink/number.py | 13 +- .../fixtures/device_points_nibe_f730.json | 17 +++ .../myuplink/snapshots/test_diagnostics.ambr | 34 +++++ .../myuplink/snapshots/test_number.ambr | 126 ++++++++++++++++-- 4 files changed, 177 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/myuplink/number.py b/homeassistant/components/myuplink/number.py index b05ab5d46c9..3d336953396 100644 --- a/homeassistant/components/myuplink/number.py +++ b/homeassistant/components/myuplink/number.py @@ -110,13 +110,16 @@ class MyUplinkNumber(MyUplinkEntity, NumberEntity): # Internal properties self.point_id = device_point.parameter_id self._attr_name = device_point.parameter_name + _scale = float(device_point.scale_value if device_point.scale_value else 1.0) self._attr_native_min_value = ( - device_point.raw["minValue"] if device_point.raw["minValue"] else -30000 - ) * 
float(device_point.raw.get("scaleValue", 1)) + device_point.min_value if device_point.min_value else -30000 + ) * _scale self._attr_native_max_value = ( - device_point.raw["maxValue"] if device_point.raw["maxValue"] else 30000 - ) * float(device_point.raw.get("scaleValue", 1)) - self._attr_step_value = device_point.raw.get("stepValue", 20) + device_point.max_value if device_point.max_value else 30000 + ) * _scale + self._attr_native_step = ( + device_point.step_value if device_point.step_value else 1.0 + ) * _scale if entity_description is not None: self.entity_description = entity_description diff --git a/tests/components/myuplink/fixtures/device_points_nibe_f730.json b/tests/components/myuplink/fixtures/device_points_nibe_f730.json index aaccdec530a..0a61ab05f21 100644 --- a/tests/components/myuplink/fixtures/device_points_nibe_f730.json +++ b/tests/components/myuplink/fixtures/device_points_nibe_f730.json @@ -1091,5 +1091,22 @@ "enumValues": [], "scaleValue": "1", "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47398", + "parameterName": "Room sensor set point value heating climate system 1", + "parameterUnit": "°C", + "writable": true, + "timestamp": "2024-12-11T13:23:12+00:00", + "value": 14.5, + "strVal": "14.5°C", + "smartHomeCategories": [], + "minValue": 50.0, + "maxValue": 350.0, + "stepValue": 5.0, + "enumValues": [], + "scaleValue": "0.1", + "zoneId": null } ] diff --git a/tests/components/myuplink/snapshots/test_diagnostics.ambr b/tests/components/myuplink/snapshots/test_diagnostics.ambr index 71b33c58a87..6fe6becff11 100644 --- a/tests/components/myuplink/snapshots/test_diagnostics.ambr +++ b/tests/components/myuplink/snapshots/test_diagnostics.ambr @@ -1152,6 +1152,23 @@ "enumValues": [], "scaleValue": "1", "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47398", + "parameterName": "Room sensor set point value heating climate system 1", + "parameterUnit": "°C", + "writable": true, + "timestamp": 
"2024-12-11T13:23:12+00:00", + "value": 14.5, + "strVal": "14.5°C", + "smartHomeCategories": [], + "minValue": 50.0, + "maxValue": 350.0, + "stepValue": 5.0, + "enumValues": [], + "scaleValue": "0.1", + "zoneId": null } ] @@ -2297,6 +2314,23 @@ "enumValues": [], "scaleValue": "1", "zoneId": null + }, + { + "category": "F730 CU 3x400V", + "parameterId": "47398", + "parameterName": "Room sensor set point value heating climate system 1", + "parameterUnit": "°C", + "writable": true, + "timestamp": "2024-12-11T13:23:12+00:00", + "value": 14.5, + "strVal": "14.5°C", + "smartHomeCategories": [], + "minValue": 50.0, + "maxValue": 350.0, + "stepValue": 5.0, + "enumValues": [], + "scaleValue": "0.1", + "zoneId": null } ] diff --git a/tests/components/myuplink/snapshots/test_number.ambr b/tests/components/myuplink/snapshots/test_number.ambr index db1a8e0949f..c47d3c60295 100644 --- a/tests/components/myuplink/snapshots/test_number.ambr +++ b/tests/components/myuplink/snapshots/test_number.ambr @@ -8,7 +8,7 @@ 'max': 3000.0, 'min': -3000.0, 'mode': , - 'step': 1.0, + 'step': 0.1, }), 'config_entry_id': , 'device_class': None, @@ -44,7 +44,7 @@ 'max': 3000.0, 'min': -3000.0, 'mode': , - 'step': 1.0, + 'step': 0.1, 'unit_of_measurement': 'DM', }), 'context': , @@ -64,7 +64,7 @@ 'max': 3000.0, 'min': -3000.0, 'mode': , - 'step': 1.0, + 'step': 0.1, }), 'config_entry_id': , 'device_class': None, @@ -100,7 +100,7 @@ 'max': 3000.0, 'min': -3000.0, 'mode': , - 'step': 1.0, + 'step': 0.1, 'unit_of_measurement': 'DM', }), 'context': , @@ -221,6 +221,116 @@ 'state': '1.0', }) # --- +# name: test_number_states[platforms0][number.gotham_city_room_sensor_set_point_value_heating_climate_system_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 35.0, + 'min': 5.0, + 'mode': , + 'step': 0.5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': 
None, + 'entity_id': 'number.gotham_city_room_sensor_set_point_value_heating_climate_system_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Room sensor set point value heating climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47398', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_room_sensor_set_point_value_heating_climate_system_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Room sensor set point value heating climate system 1', + 'max': 35.0, + 'min': 5.0, + 'mode': , + 'step': 0.5, + }), + 'context': , + 'entity_id': 'number.gotham_city_room_sensor_set_point_value_heating_climate_system_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14.5', + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_room_sensor_set_point_value_heating_climate_system_1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 35.0, + 'min': 5.0, + 'mode': , + 'step': 0.5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.gotham_city_room_sensor_set_point_value_heating_climate_system_1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Room sensor set point value heating climate system 1', + 'platform': 'myuplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
None, + 'unique_id': 'batman-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47398', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_states[platforms0][number.gotham_city_room_sensor_set_point_value_heating_climate_system_1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gotham City Room sensor set point value heating climate system 1', + 'max': 35.0, + 'min': 5.0, + 'mode': , + 'step': 0.5, + }), + 'context': , + 'entity_id': 'number.gotham_city_room_sensor_set_point_value_heating_climate_system_1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14.5', + }) +# --- # name: test_number_states[platforms0][number.gotham_city_start_diff_additional_heat-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -230,7 +340,7 @@ 'max': 2000.0, 'min': 100.0, 'mode': , - 'step': 1.0, + 'step': 10.0, }), 'config_entry_id': , 'device_class': None, @@ -266,7 +376,7 @@ 'max': 2000.0, 'min': 100.0, 'mode': , - 'step': 1.0, + 'step': 10.0, 'unit_of_measurement': 'DM', }), 'context': , @@ -286,7 +396,7 @@ 'max': 2000.0, 'min': 100.0, 'mode': , - 'step': 1.0, + 'step': 10.0, }), 'config_entry_id': , 'device_class': None, @@ -322,7 +432,7 @@ 'max': 2000.0, 'min': 100.0, 'mode': , - 'step': 1.0, + 'step': 10.0, 'unit_of_measurement': 'DM', }), 'context': , From e3d14e699316bef29f41c0ba580d0cef434ec98d Mon Sep 17 00:00:00 2001 From: jb101010-2 <168106462+jb101010-2@users.noreply.github.com> Date: Fri, 13 Dec 2024 09:01:48 +0100 Subject: [PATCH 173/677] Bump pysuezV2 to 1.3.5 (#133076) --- homeassistant/components/suez_water/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/suez_water/manifest.json b/homeassistant/components/suez_water/manifest.json index 240be0f37bd..7e720a86afd 100644 --- a/homeassistant/components/suez_water/manifest.json +++ b/homeassistant/components/suez_water/manifest.json @@ -6,5 
+6,5 @@ "documentation": "https://www.home-assistant.io/integrations/suez_water", "iot_class": "cloud_polling", "loggers": ["pysuez", "regex"], - "requirements": ["pysuezV2==1.3.2"] + "requirements": ["pysuezV2==1.3.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index 4f61b88ed00..9c1285b6d32 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2301,7 +2301,7 @@ pysqueezebox==0.10.0 pystiebeleltron==0.0.1.dev2 # homeassistant.components.suez_water -pysuezV2==1.3.2 +pysuezV2==1.3.5 # homeassistant.components.switchbee pyswitchbee==1.8.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 06448688306..56c8be03f43 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1864,7 +1864,7 @@ pyspeex-noise==1.0.2 pysqueezebox==0.10.0 # homeassistant.components.suez_water -pysuezV2==1.3.2 +pysuezV2==1.3.5 # homeassistant.components.switchbee pyswitchbee==1.8.3 From 11b65b1eb313c0d816bfdc99d36b7c9d3d347cd8 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Fri, 13 Dec 2024 09:21:14 +0100 Subject: [PATCH 174/677] Bump watchdog to 6.0.0 (#132895) --- .../components/folder_watcher/__init__.py | 14 +++++++++----- .../components/folder_watcher/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 12 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/folder_watcher/__init__.py b/homeassistant/components/folder_watcher/__init__.py index 3aeaa6f7ef2..dd56b3aad72 100644 --- a/homeassistant/components/folder_watcher/__init__.py +++ b/homeassistant/components/folder_watcher/__init__.py @@ -7,6 +7,10 @@ import os from typing import cast from watchdog.events import ( + DirCreatedEvent, + DirDeletedEvent, + DirModifiedEvent, + DirMovedEvent, FileClosedEvent, FileCreatedEvent, FileDeletedEvent, @@ -68,7 +72,7 @@ class EventHandler(PatternMatchingEventHandler): def __init__(self, patterns: list[str], hass: HomeAssistant, entry_id: str) -> None: """Initialise the 
EventHandler.""" - super().__init__(patterns) + super().__init__(patterns=patterns) self.hass = hass self.entry_id = entry_id @@ -101,19 +105,19 @@ class EventHandler(PatternMatchingEventHandler): signal = f"folder_watcher-{self.entry_id}" dispatcher_send(self.hass, signal, event.event_type, fireable) - def on_modified(self, event: FileModifiedEvent) -> None: + def on_modified(self, event: DirModifiedEvent | FileModifiedEvent) -> None: """File modified.""" self.process(event) - def on_moved(self, event: FileMovedEvent) -> None: + def on_moved(self, event: DirMovedEvent | FileMovedEvent) -> None: """File moved.""" self.process(event, moved=True) - def on_created(self, event: FileCreatedEvent) -> None: + def on_created(self, event: DirCreatedEvent | FileCreatedEvent) -> None: """File created.""" self.process(event) - def on_deleted(self, event: FileDeletedEvent) -> None: + def on_deleted(self, event: DirDeletedEvent | FileDeletedEvent) -> None: """File deleted.""" self.process(event) diff --git a/homeassistant/components/folder_watcher/manifest.json b/homeassistant/components/folder_watcher/manifest.json index 7b471e08fcc..1f0d9c595ee 100644 --- a/homeassistant/components/folder_watcher/manifest.json +++ b/homeassistant/components/folder_watcher/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["watchdog"], "quality_scale": "internal", - "requirements": ["watchdog==2.3.1"] + "requirements": ["watchdog==6.0.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 9c1285b6d32..e4fcb06671b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2980,7 +2980,7 @@ wakeonlan==2.1.0 wallbox==0.7.0 # homeassistant.components.folder_watcher -watchdog==2.3.1 +watchdog==6.0.0 # homeassistant.components.waterfurnace waterfurnace==1.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 56c8be03f43..257125c450d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2387,7 +2387,7 @@ 
wakeonlan==2.1.0 wallbox==0.7.0 # homeassistant.components.folder_watcher -watchdog==2.3.1 +watchdog==6.0.0 # homeassistant.components.watergate watergate-local-api==2024.4.1 From e4cca3fe408ed2c20f3eeda9b4b7a73b7bdaf86f Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Fri, 13 Dec 2024 09:22:01 +0100 Subject: [PATCH 175/677] Update devcontainer to Python 3.13 (#132313) --- Dockerfile.dev | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile.dev b/Dockerfile.dev index 48f582a1581..5a3f1a2ae64 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -1,4 +1,4 @@ -FROM mcr.microsoft.com/devcontainers/python:1-3.12 +FROM mcr.microsoft.com/devcontainers/python:1-3.13 SHELL ["/bin/bash", "-o", "pipefail", "-c"] From f9f37b9932f345b8a0cc2615c7feacb6e903d6d9 Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Fri, 13 Dec 2024 09:23:53 +0100 Subject: [PATCH 176/677] Velbus docs quality bump (#133070) --- homeassistant/components/velbus/quality_scale.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/velbus/quality_scale.yaml b/homeassistant/components/velbus/quality_scale.yaml index 68fe5ead781..ab2df68f973 100644 --- a/homeassistant/components/velbus/quality_scale.yaml +++ b/homeassistant/components/velbus/quality_scale.yaml @@ -16,10 +16,10 @@ rules: comment: | Dynamically build up the port parameter based on inputs provided by the user, do not fill-in a name parameter, build it up in the config flow dependency-transparency: done - docs-actions: todo - docs-high-level-description: todo - docs-installation-instructions: todo - docs-removal-instructions: todo + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done entity-event-setup: todo entity-unique-id: done has-entity-name: todo From 899fb091fc12dc610c9f74291d61d5bfea8ef166 Mon Sep 17 00:00:00 2001 From: epenet 
<6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 09:31:21 +0100 Subject: [PATCH 177/677] Simplify access to hass in service calls (#133062) --- homeassistant/core.py | 6 +- tests/components/homeassistant/test_init.py | 1 + tests/components/text/test_init.py | 9 +- tests/conftest.py | 2 +- tests/helpers/test_entity_component.py | 17 +- tests/helpers/test_service.py | 259 ++++++++++++++------ tests/test_core.py | 4 +- 7 files changed, 204 insertions(+), 94 deletions(-) diff --git a/homeassistant/core.py b/homeassistant/core.py index 0640664d64f..da7a206b14e 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -2432,10 +2432,11 @@ class Service: class ServiceCall: """Representation of a call to a service.""" - __slots__ = ("domain", "service", "data", "context", "return_response") + __slots__ = ("hass", "domain", "service", "data", "context", "return_response") def __init__( self, + hass: HomeAssistant, domain: str, service: str, data: dict[str, Any] | None = None, @@ -2443,6 +2444,7 @@ class ServiceCall: return_response: bool = False, ) -> None: """Initialize a service call.""" + self.hass = hass self.domain = domain self.service = service self.data = ReadOnlyDict(data or {}) @@ -2768,7 +2770,7 @@ class ServiceRegistry: processed_data = service_data service_call = ServiceCall( - domain, service, processed_data, context, return_response + self._hass, domain, service, processed_data, context, return_response ) self._hass.bus.async_fire_internal( diff --git a/tests/components/homeassistant/test_init.py b/tests/components/homeassistant/test_init.py index 33d78cd6c9f..56eeb4177b1 100644 --- a/tests/components/homeassistant/test_init.py +++ b/tests/components/homeassistant/test_init.py @@ -184,6 +184,7 @@ async def test_turn_on_skips_domains_without_service( # because by mocking out the call service method, we mock out all # So we mimic how the service registry calls services service_call = ha.ServiceCall( + hass, "homeassistant", "turn_on", 
{"entity_id": ["light.test", "sensor.bla", "binary_sensor.blub", "light.bla"]}, diff --git a/tests/components/text/test_init.py b/tests/components/text/test_init.py index 8e20af6cb7a..3764d481928 100644 --- a/tests/components/text/test_init.py +++ b/tests/components/text/test_init.py @@ -64,21 +64,22 @@ async def test_text_set_value(hass: HomeAssistant) -> None: with pytest.raises(ValueError): await _async_set_value( - text, ServiceCall(DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: ""}) + text, ServiceCall(hass, DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: ""}) ) with pytest.raises(ValueError): await _async_set_value( - text, ServiceCall(DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: "hello world!"}) + text, + ServiceCall(hass, DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: "hello world!"}), ) with pytest.raises(ValueError): await _async_set_value( - text, ServiceCall(DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: "HELLO"}) + text, ServiceCall(hass, DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: "HELLO"}) ) await _async_set_value( - text, ServiceCall(DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: "test2"}) + text, ServiceCall(hass, DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: "test2"}) ) assert text.state == "test2" diff --git a/tests/conftest.py b/tests/conftest.py index c46ed0407e5..2cefe72f414 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1899,7 +1899,7 @@ def service_calls(hass: HomeAssistant) -> Generator[list[ServiceCall]]: return_response: bool = False, ) -> ServiceResponse: calls.append( - ServiceCall(domain, service, service_data, context, return_response) + ServiceCall(hass, domain, service, service_data, context, return_response) ) try: return await _original_async_call( diff --git a/tests/helpers/test_entity_component.py b/tests/helpers/test_entity_component.py index 9723b91eb9a..940bd3e37fd 100644 --- a/tests/helpers/test_entity_component.py +++ b/tests/helpers/test_entity_component.py @@ -189,13 +189,14 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non ] 
) - call_1 = ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) + call_1 = ServiceCall(hass, "test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert sorted( ent.entity_id for ent in (await component.async_extract_from_service(call_1)) ) == ["test_domain.test_1", "test_domain.test_3"] call_2 = ServiceCall( + hass, "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, @@ -256,17 +257,18 @@ async def test_extract_from_service_fails_if_no_entity_id(hass: HomeAssistant) - ) assert ( - await component.async_extract_from_service(ServiceCall("test", "service")) == [] + await component.async_extract_from_service(ServiceCall(hass, "test", "service")) + == [] ) assert ( await component.async_extract_from_service( - ServiceCall("test", "service", {"entity_id": ENTITY_MATCH_NONE}) + ServiceCall(hass, "test", "service", {"entity_id": ENTITY_MATCH_NONE}) ) == [] ) assert ( await component.async_extract_from_service( - ServiceCall("test", "service", {"area_id": ENTITY_MATCH_NONE}) + ServiceCall(hass, "test", "service", {"area_id": ENTITY_MATCH_NONE}) ) == [] ) @@ -283,6 +285,7 @@ async def test_extract_from_service_filter_out_non_existing_entities( ) call = ServiceCall( + hass, "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, @@ -299,7 +302,7 @@ async def test_extract_from_service_no_group_expand(hass: HomeAssistant) -> None await component.async_setup({}) await component.async_add_entities([MockEntity(entity_id="group.test_group")]) - call = ServiceCall("test", "service", {"entity_id": ["group.test_group"]}) + call = ServiceCall(hass, "test", "service", {"entity_id": ["group.test_group"]}) extracted = await component.async_extract_from_service(call, expand_group=False) assert len(extracted) == 1 @@ -465,7 +468,7 @@ async def test_extract_all_omit_entity_id( [MockEntity(name="test_1"), MockEntity(name="test_2")] ) - call = ServiceCall("test", "service") + call = ServiceCall(hass, "test", 
"service") assert ( sorted( @@ -485,7 +488,7 @@ async def test_extract_all_use_match_all( [MockEntity(name="test_1"), MockEntity(name="test_2")] ) - call = ServiceCall("test", "service", {"entity_id": "all"}) + call = ServiceCall(hass, "test", "service", {"entity_id": "all"}) assert sorted( ent.entity_id for ent in await component.async_extract_from_service(call) diff --git a/tests/helpers/test_service.py b/tests/helpers/test_service.py index e63cb69909c..6d03e09cdf7 100644 --- a/tests/helpers/test_service.py +++ b/tests/helpers/test_service.py @@ -642,11 +642,11 @@ async def test_extract_entity_ids(hass: HomeAssistant) -> None: order=None, ) - call = ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) + call = ServiceCall(hass, "light", "turn_on", {ATTR_ENTITY_ID: "light.Bowl"}) assert {"light.bowl"} == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) + call = ServiceCall(hass, "light", "turn_on", {ATTR_ENTITY_ID: "group.test"}) assert {"light.ceiling", "light.kitchen"} == await service.async_extract_entity_ids( hass, call @@ -659,7 +659,7 @@ async def test_extract_entity_ids(hass: HomeAssistant) -> None: assert ( await service.async_extract_entity_ids( hass, - ServiceCall("light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), + ServiceCall(hass, "light", "turn_on", {ATTR_ENTITY_ID: ENTITY_MATCH_NONE}), ) == set() ) @@ -669,20 +669,22 @@ async def test_extract_entity_ids_from_area( hass: HomeAssistant, floor_area_mock ) -> None: """Test extract_entity_ids method with areas.""" - call = ServiceCall("light", "turn_on", {"area_id": "own-area"}) + call = ServiceCall(hass, "light", "turn_on", {"area_id": "own-area"}) assert { "light.in_own_area", } == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {"area_id": "test-area"}) + call = ServiceCall(hass, "light", "turn_on", {"area_id": "test-area"}) assert { "light.in_area", 
"light.assigned_to_area", } == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) + call = ServiceCall( + hass, "light", "turn_on", {"area_id": ["test-area", "diff-area"]} + ) assert { "light.in_area", @@ -692,7 +694,7 @@ async def test_extract_entity_ids_from_area( assert ( await service.async_extract_entity_ids( - hass, ServiceCall("light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) + hass, ServiceCall(hass, "light", "turn_on", {"area_id": ENTITY_MATCH_NONE}) ) == set() ) @@ -703,13 +705,13 @@ async def test_extract_entity_ids_from_devices( ) -> None: """Test extract_entity_ids method with devices.""" assert await service.async_extract_entity_ids( - hass, ServiceCall("light", "turn_on", {"device_id": "device-no-area-id"}) + hass, ServiceCall(hass, "light", "turn_on", {"device_id": "device-no-area-id"}) ) == { "light.no_area", } assert await service.async_extract_entity_ids( - hass, ServiceCall("light", "turn_on", {"device_id": "device-area-a-id"}) + hass, ServiceCall(hass, "light", "turn_on", {"device_id": "device-area-a-id"}) ) == { "light.in_area_a", "light.in_area_b", @@ -717,7 +719,8 @@ async def test_extract_entity_ids_from_devices( assert ( await service.async_extract_entity_ids( - hass, ServiceCall("light", "turn_on", {"device_id": "non-existing-id"}) + hass, + ServiceCall(hass, "light", "turn_on", {"device_id": "non-existing-id"}), ) == set() ) @@ -726,14 +729,16 @@ async def test_extract_entity_ids_from_devices( @pytest.mark.usefixtures("floor_area_mock") async def test_extract_entity_ids_from_floor(hass: HomeAssistant) -> None: """Test extract_entity_ids method with floors.""" - call = ServiceCall("light", "turn_on", {"floor_id": "test-floor"}) + call = ServiceCall(hass, "light", "turn_on", {"floor_id": "test-floor"}) assert { "light.in_area", "light.assigned_to_area", } == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", 
{"floor_id": ["test-floor", "floor-a"]}) + call = ServiceCall( + hass, "light", "turn_on", {"floor_id": ["test-floor", "floor-a"]} + ) assert { "light.in_area", @@ -743,7 +748,7 @@ async def test_extract_entity_ids_from_floor(hass: HomeAssistant) -> None: assert ( await service.async_extract_entity_ids( - hass, ServiceCall("light", "turn_on", {"floor_id": ENTITY_MATCH_NONE}) + hass, ServiceCall(hass, "light", "turn_on", {"floor_id": ENTITY_MATCH_NONE}) ) == set() ) @@ -752,13 +757,13 @@ async def test_extract_entity_ids_from_floor(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("label_mock") async def test_extract_entity_ids_from_labels(hass: HomeAssistant) -> None: """Test extract_entity_ids method with labels.""" - call = ServiceCall("light", "turn_on", {"label_id": "my-label"}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": "my-label"}) assert { "light.with_my_label", } == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {"label_id": "label1"}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": "label1"}) assert { "light.with_label1_from_device", @@ -767,14 +772,14 @@ async def test_extract_entity_ids_from_labels(hass: HomeAssistant) -> None: "light.with_label1_and_label2_from_device", } == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {"label_id": ["label2"]}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": ["label2"]}) assert { "light.with_labels_from_device", "light.with_label1_and_label2_from_device", } == await service.async_extract_entity_ids(hass, call) - call = ServiceCall("light", "turn_on", {"label_id": ["label_area"]}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": ["label_area"]}) assert { "light.with_labels_from_device", @@ -782,7 +787,7 @@ async def test_extract_entity_ids_from_labels(hass: HomeAssistant) -> None: assert ( await service.async_extract_entity_ids( - hass, ServiceCall("light", "turn_on", 
{"label_id": ENTITY_MATCH_NONE}) + hass, ServiceCall(hass, "light", "turn_on", {"label_id": ENTITY_MATCH_NONE}) ) == set() ) @@ -1281,7 +1286,7 @@ async def test_call_with_required_features(hass: HomeAssistant, mock_entities) - hass, mock_entities, HassJob(test_service_mock), - ServiceCall("test_domain", "test_service", {"entity_id": "all"}), + ServiceCall(hass, "test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A], ) @@ -1305,7 +1310,7 @@ async def test_call_with_required_features(hass: HomeAssistant, mock_entities) - mock_entities, HassJob(test_service_mock), ServiceCall( - "test_domain", "test_service", {"entity_id": "light.living_room"} + hass, "test_domain", "test_service", {"entity_id": "light.living_room"} ), required_features=[SUPPORT_A], ) @@ -1321,7 +1326,7 @@ async def test_call_with_both_required_features( hass, mock_entities, HassJob(test_service_mock), - ServiceCall("test_domain", "test_service", {"entity_id": "all"}), + ServiceCall(hass, "test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A | SUPPORT_B], ) @@ -1340,7 +1345,7 @@ async def test_call_with_one_of_required_features( hass, mock_entities, HassJob(test_service_mock), - ServiceCall("test_domain", "test_service", {"entity_id": "all"}), + ServiceCall(hass, "test_domain", "test_service", {"entity_id": "all"}), required_features=[SUPPORT_A, SUPPORT_C], ) @@ -1361,7 +1366,9 @@ async def test_call_with_sync_func(hass: HomeAssistant, mock_entities) -> None: hass, mock_entities, HassJob(test_service_mock), - ServiceCall("test_domain", "test_service", {"entity_id": "light.kitchen"}), + ServiceCall( + hass, "test_domain", "test_service", {"entity_id": "light.kitchen"} + ), ) assert test_service_mock.call_count == 1 @@ -1374,6 +1381,7 @@ async def test_call_with_sync_attr(hass: HomeAssistant, mock_entities) -> None: mock_entities, "sync_method", ServiceCall( + hass, "test_domain", "test_service", {"entity_id": "light.kitchen", "area_id": 
"abcd"}, @@ -1392,6 +1400,7 @@ async def test_call_context_user_not_exist(hass: HomeAssistant) -> None: {}, Mock(), ServiceCall( + hass, "test_domain", "test_service", context=Context(user_id="non-existing"), @@ -1419,6 +1428,7 @@ async def test_call_context_target_all( mock_entities, Mock(), ServiceCall( + hass, "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL}, @@ -1447,6 +1457,7 @@ async def test_call_context_target_specific( mock_entities, Mock(), ServiceCall( + hass, "test_domain", "test_service", {"entity_id": "light.kitchen"}, @@ -1474,6 +1485,7 @@ async def test_call_context_target_specific_no_auth( mock_entities, Mock(), ServiceCall( + hass, "test_domain", "test_service", {"entity_id": "light.kitchen"}, @@ -1494,7 +1506,7 @@ async def test_call_no_context_target_all( mock_entities, Mock(), ServiceCall( - "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} + hass, "test_domain", "test_service", data={"entity_id": ENTITY_MATCH_ALL} ), ) @@ -1513,6 +1525,7 @@ async def test_call_no_context_target_specific( mock_entities, Mock(), ServiceCall( + hass, "test_domain", "test_service", {"entity_id": ["light.kitchen", "light.non-existing"]}, @@ -1534,7 +1547,7 @@ async def test_call_with_match_all( hass, mock_entities, Mock(), - ServiceCall("test_domain", "test_service", {"entity_id": "all"}), + ServiceCall(hass, "test_domain", "test_service", {"entity_id": "all"}), ) assert len(mock_handle_entity_call.mock_calls) == 4 @@ -1551,7 +1564,7 @@ async def test_call_with_omit_entity_id( hass, mock_entities, Mock(), - ServiceCall("test_domain", "test_service"), + ServiceCall(hass, "test_domain", "test_service"), ) assert len(mock_handle_entity_call.mock_calls) == 0 @@ -1797,7 +1810,7 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non MockEntity(name="test_4", entity_id="test_domain.test_4", available=False), ] - call_1 = ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) + call_1 = 
ServiceCall(hass, "test", "service", data={"entity_id": ENTITY_MATCH_ALL}) assert [ ent.entity_id @@ -1805,6 +1818,7 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non ] == ["test_domain.test_1", "test_domain.test_3"] call_2 = ServiceCall( + hass, "test", "service", data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, @@ -1820,6 +1834,7 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non hass, entities, ServiceCall( + hass, "test", "service", data={"entity_id": ENTITY_MATCH_NONE}, @@ -1835,7 +1850,7 @@ async def test_extract_from_service_empty_if_no_entity_id(hass: HomeAssistant) - MockEntity(name="test_1", entity_id="test_domain.test_1"), MockEntity(name="test_2", entity_id="test_domain.test_2"), ] - call = ServiceCall("test", "service") + call = ServiceCall(hass, "test", "service") assert [ ent.entity_id @@ -1853,6 +1868,7 @@ async def test_extract_from_service_filter_out_non_existing_entities( ] call = ServiceCall( + hass, "test", "service", {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, @@ -1874,12 +1890,14 @@ async def test_extract_from_service_area_id( MockEntity(name="diff_area", entity_id="light.diff_area"), ] - call = ServiceCall("light", "turn_on", {"area_id": "test-area"}) + call = ServiceCall(hass, "light", "turn_on", {"area_id": "test-area"}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.in_area" - call = ServiceCall("light", "turn_on", {"area_id": ["test-area", "diff-area"]}) + call = ServiceCall( + hass, "light", "turn_on", {"area_id": ["test-area", "diff-area"]} + ) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ @@ -1888,6 +1906,7 @@ async def test_extract_from_service_area_id( ] call = ServiceCall( + hass, "light", "turn_on", {"area_id": ["test-area", 
"diff-area"], "device_id": "device-no-area-id"}, @@ -1912,17 +1931,17 @@ async def test_extract_from_service_label_id(hass: HomeAssistant) -> None: ), ] - call = ServiceCall("light", "turn_on", {"label_id": "label_area"}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": "label_area"}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.with_labels_from_device" - call = ServiceCall("light", "turn_on", {"label_id": "my-label"}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": "my-label"}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 1 assert extracted[0].entity_id == "light.with_my_label" - call = ServiceCall("light", "turn_on", {"label_id": ["my-label", "label1"]}) + call = ServiceCall(hass, "light", "turn_on", {"label_id": ["my-label", "label1"]}) extracted = await service.async_extract_entities(hass, entities, call) assert len(extracted) == 2 assert sorted(ent.entity_id for ent in extracted) == [ @@ -1931,6 +1950,7 @@ async def test_extract_from_service_label_id(hass: HomeAssistant) -> None: ] call = ServiceCall( + hass, "light", "turn_on", {"label_id": ["my-label", "label1"], "device_id": "device-no-labels"}, @@ -1949,6 +1969,7 @@ async def test_entity_service_call_warn_referenced( ) -> None: """Test we only warn for referenced entities in entity_service_call.""" call = ServiceCall( + hass, "light", "turn_on", { @@ -1972,6 +1993,7 @@ async def test_async_extract_entities_warn_referenced( ) -> None: """Test we only warn for referenced entities in async_extract_entities.""" call = ServiceCall( + hass, "light", "turn_on", { @@ -1997,6 +2019,7 @@ async def test_async_extract_config_entry_ids(hass: HomeAssistant) -> None: device_no_entities = dr.DeviceEntry(id="device-no-entities", config_entries={"abc"}) call = ServiceCall( + hass, "homeassistant", "reload_config_entry", { @@ -2042,17 +2065,33 @@ async def 
test_reload_service_helper(hass: HomeAssistant) -> None: reloader = service.ReloadServiceHelper(reload_service_handler, reload_targets) tasks = [ # This reload task will start executing first, (target1) - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), # These reload tasks will be deduplicated to (target2, target3, target4, target1) # while the first task is reloaded, note that target1 can't be deduplicated # because it's already being reloaded. - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), ] await asyncio.gather(*tasks) assert reloaded == unordered( @@ -2063,13 +2102,21 @@ async def test_reload_service_helper(hass: HomeAssistant) -> None: reloaded.clear() tasks = [ # This reload task will start executing 
first, (target1) - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), # These reload tasks will be deduplicated to (target2, target3, target4, all) # while the first task is reloaded. - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), - reloader.execute_service(ServiceCall("test", "test")), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), + reloader.execute_service(ServiceCall(hass, "test", "test")), ] await asyncio.gather(*tasks) assert reloaded == unordered(["target1", "target2", "target3", "target4", "all"]) @@ -2078,13 +2125,21 @@ async def test_reload_service_helper(hass: HomeAssistant) -> None: reloaded.clear() tasks = [ # This reload task will start executing first, (all) - reloader.execute_service(ServiceCall("test", "test")), + reloader.execute_service(ServiceCall(hass, "test", "test")), # These reload tasks will be deduplicated to (target1, target2, target3, target4) # while the first task is reloaded. 
- reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), ] await asyncio.gather(*tasks) assert reloaded == unordered(["all", "target1", "target2", "target3", "target4"]) @@ -2093,21 +2148,45 @@ async def test_reload_service_helper(hass: HomeAssistant) -> None: reloaded.clear() tasks = [ # This reload task will start executing first, (target1) - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), # These reload tasks will be deduplicated to (target2, target3, target4, target1) # while the first task is reloaded, note that target1 can't be deduplicated # because it's already being reloaded. 
- reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), ] await asyncio.gather(*tasks) assert reloaded == unordered( @@ -2118,14 +2197,22 @@ async def 
test_reload_service_helper(hass: HomeAssistant) -> None: reloaded.clear() tasks = [ # This reload task will start executing first, (target1) - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), # These reload tasks will be deduplicated to (target2, target3, target4, all) # while the first task is reloaded. - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), - reloader.execute_service(ServiceCall("test", "test")), - reloader.execute_service(ServiceCall("test", "test")), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), + reloader.execute_service(ServiceCall(hass, "test", "test")), + reloader.execute_service(ServiceCall(hass, "test", "test")), ] await asyncio.gather(*tasks) assert reloaded == unordered(["target1", "target2", "target3", "target4", "all"]) @@ -2134,17 +2221,33 @@ async def test_reload_service_helper(hass: HomeAssistant) -> None: reloaded.clear() tasks = [ # This reload task will start executing first, (all) - reloader.execute_service(ServiceCall("test", "test")), + reloader.execute_service(ServiceCall(hass, "test", "test")), # These reload tasks will be deduplicated to (target1, target2, target3, target4) # while the first task is reloaded. 
- reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target1"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target2"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target3"})), - reloader.execute_service(ServiceCall("test", "test", {"target": "target4"})), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target1"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target2"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target3"}) + ), + reloader.execute_service( + ServiceCall(hass, "test", "test", {"target": "target4"}) + ), ] await asyncio.gather(*tasks) assert reloaded == unordered(["all", "target1", "target2", "target3", "target4"]) diff --git a/tests/test_core.py b/tests/test_core.py index 0100c35055e..60b907d57ca 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1562,10 +1562,10 @@ async def test_statemachine_avoids_updating_attributes(hass: HomeAssistant) -> N def test_service_call_repr() -> None: """Test ServiceCall repr.""" - call = ha.ServiceCall("homeassistant", "start") + call = ha.ServiceCall(None, "homeassistant", "start") assert str(call) == f"" - call2 = ha.ServiceCall("homeassistant", "start", {"fast": "yes"}) 
+ call2 = ha.ServiceCall(None, "homeassistant", "start", {"fast": "yes"}) assert ( str(call2) == f"" From a0e49ebc97cd860637f74976931487b2c65a0e99 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 09:33:40 +0100 Subject: [PATCH 178/677] Use internal min/max mireds in template (#133113) --- homeassistant/components/template/light.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/template/light.py b/homeassistant/components/template/light.py index 9c7bc23022a..0654a42406a 100644 --- a/homeassistant/components/template/light.py +++ b/homeassistant/components/template/light.py @@ -78,6 +78,9 @@ CONF_TEMPERATURE_TEMPLATE = "temperature_template" CONF_WHITE_VALUE_ACTION = "set_white_value" CONF_WHITE_VALUE_TEMPLATE = "white_value_template" +DEFAULT_MIN_MIREDS = 153 +DEFAULT_MAX_MIREDS = 500 + LIGHT_SCHEMA = vol.All( cv.deprecated(CONF_ENTITY_ID), vol.Schema( @@ -764,7 +767,9 @@ class LightTemplate(TemplateEntity, LightEntity): self._temperature = None return temperature = int(render) - if self.min_mireds <= temperature <= self.max_mireds: + min_mireds = self._min_mireds or DEFAULT_MIN_MIREDS + max_mireds = self._max_mireds or DEFAULT_MAX_MIREDS + if min_mireds <= temperature <= max_mireds: self._temperature = temperature else: _LOGGER.error( @@ -774,8 +779,8 @@ class LightTemplate(TemplateEntity, LightEntity): ), temperature, self.entity_id, - self.min_mireds, - self.max_mireds, + min_mireds, + max_mireds, ) self._temperature = None except ValueError: From 9ab69aa41c4afe15a48d1af03770e49a734c669b Mon Sep 17 00:00:00 2001 From: Stefan Agner Date: Fri, 13 Dec 2024 09:33:58 +0100 Subject: [PATCH 179/677] Add mWh as unit of measurement for Matter energy sensors (#133005) --- homeassistant/components/matter/sensor.py | 5 +++-- homeassistant/components/number/const.py | 4 ++-- homeassistant/components/random/config_flow.py | 6 +++++- 
homeassistant/components/sensor/const.py | 4 ++-- homeassistant/components/template/config_flow.py | 6 +++++- homeassistant/const.py | 1 + homeassistant/util/unit_conversion.py | 1 + tests/components/matter/snapshots/test_sensor.ambr | 6 ++++++ tests/components/template/test_config_flow.py | 2 +- tests/util/test_unit_conversion.py | 2 ++ 10 files changed, 28 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/matter/sensor.py b/homeassistant/components/matter/sensor.py index e10f081d497..b2a5da2aa71 100644 --- a/homeassistant/components/matter/sensor.py +++ b/homeassistant/components/matter/sensor.py @@ -612,11 +612,12 @@ DISCOVERY_SCHEMAS = [ key="ElectricalEnergyMeasurementCumulativeEnergyImported", device_class=SensorDeviceClass.ENERGY, entity_category=EntityCategory.DIAGNOSTIC, - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.MILLIWATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, suggested_display_precision=3, state_class=SensorStateClass.TOTAL_INCREASING, # id 0 of the EnergyMeasurementStruct is the cumulative energy (in mWh) - measurement_to_ha=lambda x: x.energy / 1000000, + measurement_to_ha=lambda x: x.energy, ), entity_class=MatterSensor, required_attributes=( diff --git a/homeassistant/components/number/const.py b/homeassistant/components/number/const.py index 47158826e75..56466934e5f 100644 --- a/homeassistant/components/number/const.py +++ b/homeassistant/components/number/const.py @@ -163,7 +163,7 @@ class NumberDeviceClass(StrEnum): ENERGY = "energy" """Energy. 
- Unit of measurement: `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `MJ`, `GJ` + Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `mWh`, `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `cal`, `kcal`, `Mcal`, `Gcal` """ ENERGY_STORAGE = "energy_storage" @@ -172,7 +172,7 @@ class NumberDeviceClass(StrEnum): Use this device class for sensors measuring stored energy, for example the amount of electric energy currently stored in a battery or the capacity of a battery. - Unit of measurement: `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `MJ`, `GJ` + Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `mWh`, `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `cal`, `kcal`, `Mcal`, `Gcal` """ FREQUENCY = "frequency" diff --git a/homeassistant/components/random/config_flow.py b/homeassistant/components/random/config_flow.py index 00314169260..35b7757580e 100644 --- a/homeassistant/components/random/config_flow.py +++ b/homeassistant/components/random/config_flow.py @@ -106,8 +106,12 @@ def _validate_unit(options: dict[str, Any]) -> None: and (units := DEVICE_CLASS_UNITS.get(device_class)) and (unit := options.get(CONF_UNIT_OF_MEASUREMENT)) not in units ): + # Sort twice to make sure strings with same case-insensitive order of + # letters are sorted consistently still (sorted() is guaranteed stable). sorted_units = sorted( - [f"'{unit!s}'" if unit else "no unit of measurement" for unit in units], + sorted( + [f"'{unit!s}'" if unit else "no unit of measurement" for unit in units], + ), key=str.casefold, ) if len(sorted_units) == 1: diff --git a/homeassistant/components/sensor/const.py b/homeassistant/components/sensor/const.py index a2e3cb52173..2fb563051a9 100644 --- a/homeassistant/components/sensor/const.py +++ b/homeassistant/components/sensor/const.py @@ -191,7 +191,7 @@ class SensorDeviceClass(StrEnum): Use this device class for sensors measuring energy consumption, for example electric energy consumption. 
- Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `cal`, `kcal`, `Mcal`, `Gcal` + Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `mWh`, `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `cal`, `kcal`, `Mcal`, `Gcal` """ ENERGY_STORAGE = "energy_storage" @@ -200,7 +200,7 @@ class SensorDeviceClass(StrEnum): Use this device class for sensors measuring stored energy, for example the amount of electric energy currently stored in a battery or the capacity of a battery. - Unit of measurement: `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `MJ`, `GJ` + Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `mWh`, `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `cal`, `kcal`, `Mcal`, `Gcal` """ FREQUENCY = "frequency" diff --git a/homeassistant/components/template/config_flow.py b/homeassistant/components/template/config_flow.py index 8ecef8539d3..e6cc377bc26 100644 --- a/homeassistant/components/template/config_flow.py +++ b/homeassistant/components/template/config_flow.py @@ -235,8 +235,12 @@ def _validate_unit(options: dict[str, Any]) -> None: and (units := DEVICE_CLASS_UNITS.get(device_class)) is not None and (unit := options.get(CONF_UNIT_OF_MEASUREMENT)) not in units ): + # Sort twice to make sure strings with same case-insensitive order of + # letters are sorted consistently still. 
sorted_units = sorted( - [f"'{unit!s}'" if unit else "no unit of measurement" for unit in units], + sorted( + [f"'{unit!s}'" if unit else "no unit of measurement" for unit in units], + ), key=str.casefold, ) if len(sorted_units) == 1: diff --git a/homeassistant/const.py b/homeassistant/const.py index 2eb4194ad15..c026a8e5427 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -619,6 +619,7 @@ class UnitOfEnergy(StrEnum): KILO_JOULE = "kJ" MEGA_JOULE = "MJ" GIGA_JOULE = "GJ" + MILLIWATT_HOUR = "mWh" WATT_HOUR = "Wh" KILO_WATT_HOUR = "kWh" MEGA_WATT_HOUR = "MWh" diff --git a/homeassistant/util/unit_conversion.py b/homeassistant/util/unit_conversion.py index 3cffcb5768e..8bf6d4b9fc9 100644 --- a/homeassistant/util/unit_conversion.py +++ b/homeassistant/util/unit_conversion.py @@ -266,6 +266,7 @@ class EnergyConverter(BaseUnitConverter): UnitOfEnergy.KILO_JOULE: _WH_TO_J, UnitOfEnergy.MEGA_JOULE: _WH_TO_J / 1e3, UnitOfEnergy.GIGA_JOULE: _WH_TO_J / 1e6, + UnitOfEnergy.MILLIWATT_HOUR: 1e6, UnitOfEnergy.WATT_HOUR: 1e3, UnitOfEnergy.KILO_WATT_HOUR: 1, UnitOfEnergy.MEGA_WATT_HOUR: 1 / 1e3, diff --git a/tests/components/matter/snapshots/test_sensor.ambr b/tests/components/matter/snapshots/test_sensor.ambr index 96346b906c3..44ad02d4b1e 100644 --- a/tests/components/matter/snapshots/test_sensor.ambr +++ b/tests/components/matter/snapshots/test_sensor.ambr @@ -1543,6 +1543,9 @@ 'sensor': dict({ 'suggested_display_precision': 3, }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -2480,6 +2483,9 @@ 'sensor': dict({ 'suggested_display_precision': 3, }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, diff --git a/tests/components/template/test_config_flow.py b/tests/components/template/test_config_flow.py index e0d95ff968d..2c9b81e7c91 100644 --- a/tests/components/template/test_config_flow.py +++ 
b/tests/components/template/test_config_flow.py @@ -804,7 +804,7 @@ EARLY_END_ERROR = "invalid template (TemplateSyntaxError: unexpected 'end of tem ), "unit_of_measurement": ( "'None' is not a valid unit for device class 'energy'; " - "expected one of 'cal', 'Gcal', 'GJ', 'GWh', 'J', 'kcal', 'kJ', 'kWh', 'Mcal', 'MJ', 'MWh', 'TWh', 'Wh'" + "expected one of 'cal', 'Gcal', 'GJ', 'GWh', 'J', 'kcal', 'kJ', 'kWh', 'Mcal', 'MJ', 'MWh', 'mWh', 'TWh', 'Wh'" ), }, ), diff --git a/tests/util/test_unit_conversion.py b/tests/util/test_unit_conversion.py index 4d1eda3d8de..4be32b2851e 100644 --- a/tests/util/test_unit_conversion.py +++ b/tests/util/test_unit_conversion.py @@ -441,6 +441,8 @@ _CONVERTED_VALUE: dict[ (5, UnitOfElectricPotential.MICROVOLT, 5e-6, UnitOfElectricPotential.VOLT), ], EnergyConverter: [ + (10, UnitOfEnergy.MILLIWATT_HOUR, 0.00001, UnitOfEnergy.KILO_WATT_HOUR), + (10, UnitOfEnergy.WATT_HOUR, 10000, UnitOfEnergy.MILLIWATT_HOUR), (10, UnitOfEnergy.WATT_HOUR, 0.01, UnitOfEnergy.KILO_WATT_HOUR), (10, UnitOfEnergy.WATT_HOUR, 0.00001, UnitOfEnergy.MEGA_WATT_HOUR), (10, UnitOfEnergy.WATT_HOUR, 0.00000001, UnitOfEnergy.GIGA_WATT_HOUR), From 2cd4ebbfb20ebee2994e326bec44999f89211c18 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Fri, 13 Dec 2024 09:45:38 +0100 Subject: [PATCH 180/677] Bump deebot-client to 9.4.0 (#133114) --- homeassistant/components/ecovacs/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index b9315e0c1c6..271f9ee8dcd 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", 
"deebot-client==9.3.0"] + "requirements": ["py-sucks==0.9.10", "deebot-client==9.4.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index e4fcb06671b..cc715c895f9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -739,7 +739,7 @@ debugpy==1.8.8 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==9.3.0 +deebot-client==9.4.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 257125c450d..7094270a7a6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -629,7 +629,7 @@ dbus-fast==2.24.3 debugpy==1.8.8 # homeassistant.components.ecovacs -deebot-client==9.3.0 +deebot-client==9.4.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns From 566843591eccdc6c57468a0d1f39d56b618b942a Mon Sep 17 00:00:00 2001 From: Andrew Sayre <6730289+andrewsayre@users.noreply.github.com> Date: Fri, 13 Dec 2024 02:46:52 -0600 Subject: [PATCH 181/677] Remove HEOS yaml import (#133082) --- homeassistant/components/heos/__init__.py | 38 +---- homeassistant/components/heos/config_flow.py | 35 ++--- homeassistant/components/heos/const.py | 1 - homeassistant/components/heos/manifest.json | 1 + .../components/heos/quality_scale.yaml | 25 +-- homeassistant/components/heos/strings.json | 1 + tests/components/heos/conftest.py | 19 +++ tests/components/heos/test_config_flow.py | 145 +++++++----------- tests/components/heos/test_init.py | 29 ---- 9 files changed, 92 insertions(+), 202 deletions(-) diff --git a/homeassistant/components/heos/__init__.py b/homeassistant/components/heos/__init__.py index de56e541501..e6a46f5a4ca 100644 --- a/homeassistant/components/heos/__init__.py +++ b/homeassistant/components/heos/__init__.py @@ -8,23 +8,19 @@ from datetime import timedelta import logging from pyheos import Heos, HeosError, HeosPlayer, const as heos_const -import voluptuous as vol -from homeassistant.config_entries import 
SOURCE_IMPORT, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) -from homeassistant.helpers.typing import ConfigType from homeassistant.util import Throttle from . import services -from .config_flow import format_title from .const import ( COMMAND_RETRY_ATTEMPTS, COMMAND_RETRY_DELAY, @@ -35,14 +31,6 @@ from .const import ( PLATFORMS = [Platform.MEDIA_PLAYER] -CONFIG_SCHEMA = vol.Schema( - vol.All( - cv.deprecated(DOMAIN), - {DOMAIN: vol.Schema({vol.Required(CONF_HOST): cv.string})}, - ), - extra=vol.ALLOW_EXTRA, -) - MIN_UPDATE_SOURCES = timedelta(seconds=1) _LOGGER = logging.getLogger(__name__) @@ -61,30 +49,6 @@ class HeosRuntimeData: type HeosConfigEntry = ConfigEntry[HeosRuntimeData] -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the HEOS component.""" - if DOMAIN not in config: - return True - host = config[DOMAIN][CONF_HOST] - entries = hass.config_entries.async_entries(DOMAIN) - if not entries: - # Create new entry based on config - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data={CONF_HOST: host} - ) - ) - else: - # Check if host needs to be updated - entry = entries[0] - if entry.data[CONF_HOST] != host: - hass.config_entries.async_update_entry( - entry, title=format_title(host), data={**entry.data, CONF_HOST: host} - ) - - return True - - async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool: """Initialize config entry which represents the HEOS controller.""" # For backwards 
compat diff --git a/homeassistant/components/heos/config_flow.py b/homeassistant/components/heos/config_flow.py index 57ed51a3c05..e8a4dbf7b63 100644 --- a/homeassistant/components/heos/config_flow.py +++ b/homeassistant/components/heos/config_flow.py @@ -10,7 +10,7 @@ from homeassistant.components import ssdp from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST -from .const import DATA_DISCOVERED_HOSTS, DOMAIN +from .const import DOMAIN def format_title(host: str) -> str: @@ -34,43 +34,32 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN): friendly_name = ( f"{discovery_info.upnp[ssdp.ATTR_UPNP_FRIENDLY_NAME]} ({hostname})" ) - self.hass.data.setdefault(DATA_DISCOVERED_HOSTS, {}) - self.hass.data[DATA_DISCOVERED_HOSTS][friendly_name] = hostname - # Abort if other flows in progress or an entry already exists - if self._async_in_progress() or self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") + self.hass.data.setdefault(DOMAIN, {}) + self.hass.data[DOMAIN][friendly_name] = hostname await self.async_set_unique_id(DOMAIN) # Show selection form return self.async_show_form(step_id="user") - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Occurs when an entry is setup through config.""" - host = import_data[CONF_HOST] - # raise_on_progress is False here in case ssdp discovers - # heos first which would block the import - await self.async_set_unique_id(DOMAIN, raise_on_progress=False) - return self.async_create_entry(title=format_title(host), data={CONF_HOST: host}) - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Obtain host and validate connection.""" - self.hass.data.setdefault(DATA_DISCOVERED_HOSTS, {}) - # Only a single entry is needed for all devices - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") + 
self.hass.data.setdefault(DOMAIN, {}) + await self.async_set_unique_id(DOMAIN) # Try connecting to host if provided errors = {} host = None if user_input is not None: host = user_input[CONF_HOST] # Map host from friendly name if in discovered hosts - host = self.hass.data[DATA_DISCOVERED_HOSTS].get(host, host) + host = self.hass.data[DOMAIN].get(host, host) heos = Heos(host) try: await heos.connect() - self.hass.data.pop(DATA_DISCOVERED_HOSTS) - return await self.async_step_import({CONF_HOST: host}) + self.hass.data.pop(DOMAIN) + return self.async_create_entry( + title=format_title(host), data={CONF_HOST: host} + ) except HeosError: errors[CONF_HOST] = "cannot_connect" finally: @@ -78,9 +67,7 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN): # Return form host_type = ( - str - if not self.hass.data[DATA_DISCOVERED_HOSTS] - else vol.In(list(self.hass.data[DATA_DISCOVERED_HOSTS])) + str if not self.hass.data[DOMAIN] else vol.In(list(self.hass.data[DOMAIN])) ) return self.async_show_form( step_id="user", diff --git a/homeassistant/components/heos/const.py b/homeassistant/components/heos/const.py index 827a0c53fbf..5b2df2b5ebf 100644 --- a/homeassistant/components/heos/const.py +++ b/homeassistant/components/heos/const.py @@ -4,7 +4,6 @@ ATTR_PASSWORD = "password" ATTR_USERNAME = "username" COMMAND_RETRY_ATTEMPTS = 2 COMMAND_RETRY_DELAY = 1 -DATA_DISCOVERED_HOSTS = "heos_discovered_hosts" DOMAIN = "heos" SERVICE_SIGN_IN = "sign_in" SERVICE_SIGN_OUT = "sign_out" diff --git a/homeassistant/components/heos/manifest.json b/homeassistant/components/heos/manifest.json index a90f0aebaae..12f10bcd0e3 100644 --- a/homeassistant/components/heos/manifest.json +++ b/homeassistant/components/heos/manifest.json @@ -7,6 +7,7 @@ "iot_class": "local_push", "loggers": ["pyheos"], "requirements": ["pyheos==0.7.2"], + "single_config_entry": true, "ssdp": [ { "st": "urn:schemas-denon-com:device:ACT-Denon:1" diff --git a/homeassistant/components/heos/quality_scale.yaml 
b/homeassistant/components/heos/quality_scale.yaml index ed9939bf37c..861ca750780 100644 --- a/homeassistant/components/heos/quality_scale.yaml +++ b/homeassistant/components/heos/quality_scale.yaml @@ -8,19 +8,10 @@ rules: comment: Integration is a local push integration brands: done common-modules: todo - config-flow-test-coverage: - status: todo - comment: - 1. The config flow is 100% covered, however some tests need to let HA create the flow - handler instead of doing it manually in the test. - 2. We should also make sure every test ends in either CREATE_ENTRY or ABORT so we test - that the flow is able to recover from an error. + config-flow-test-coverage: done config-flow: - status: todo - comment: | - 1. YAML import to be removed after core team meeting discussion on approach. - 2. Consider enhnacement to automatically select a host when multiple are discovered. - 3. Move hass.data[heos_discovered_hosts] into hass.data[heos] + status: done + comment: Consider enhnacement to automatically select a host when multiple are discovered. dependency-transparency: done docs-actions: done docs-high-level-description: done @@ -34,15 +25,9 @@ rules: entity-unique-id: done has-entity-name: done runtime-data: done - test-before-configure: todo + test-before-configure: done test-before-setup: done - unique-config-entry: - status: todo - comment: | - The HEOS integration only supports a single config entry, but needs to be migrated to use - the `single_config_entry` flag. HEOS devices interconnect to each other, so connecting to - a single node yields access to all the devices setup with HEOS on your network. The HEOS API - documentation does not recommend connecting to multiple nodes which would provide no bennefit. 
+ unique-config-entry: done # Silver action-exceptions: status: todo diff --git a/homeassistant/components/heos/strings.json b/homeassistant/components/heos/strings.json index df18fc7834a..20a8a2e978b 100644 --- a/homeassistant/components/heos/strings.json +++ b/homeassistant/components/heos/strings.json @@ -16,6 +16,7 @@ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "abort": { + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } }, diff --git a/tests/components/heos/conftest.py b/tests/components/heos/conftest.py index a12f4c610ad..95a388d87a8 100644 --- a/tests/components/heos/conftest.py +++ b/tests/components/heos/conftest.py @@ -164,6 +164,25 @@ def discovery_data_fixture() -> dict: ) +@pytest.fixture(name="discovery_data_bedroom") +def discovery_data_fixture_bedroom() -> dict: + """Return mock discovery data for testing.""" + return ssdp.SsdpServiceInfo( + ssdp_usn="mock_usn", + ssdp_st="mock_st", + ssdp_location="http://127.0.0.2:60006/upnp/desc/aios_device/aios_device.xml", + upnp={ + ssdp.ATTR_UPNP_DEVICE_TYPE: "urn:schemas-denon-com:device:AiosDevice:1", + ssdp.ATTR_UPNP_FRIENDLY_NAME: "Bedroom", + ssdp.ATTR_UPNP_MANUFACTURER: "Denon", + ssdp.ATTR_UPNP_MODEL_NAME: "HEOS Drive", + ssdp.ATTR_UPNP_MODEL_NUMBER: "DWSA-10 4.0", + ssdp.ATTR_UPNP_SERIAL: None, + ssdp.ATTR_UPNP_UDN: "uuid:e61de70c-2250-1c22-0080-0005cdf512be", + }, + ) + + @pytest.fixture(name="quick_selects") def quick_selects_fixture() -> dict[int, str]: """Create a dict of quick selects for testing.""" diff --git a/tests/components/heos/test_config_flow.py b/tests/components/heos/test_config_flow.py index 7b737d7bb4b..464b62df157 100644 --- a/tests/components/heos/test_config_flow.py +++ b/tests/components/heos/test_config_flow.py @@ -1,14 +1,10 @@ """Tests for the Heos config flow module.""" -from unittest.mock import patch -from urllib.parse 
import urlparse - from pyheos import HeosError from homeassistant.components import heos, ssdp -from homeassistant.components.heos.config_flow import HeosFlowHandler -from homeassistant.components.heos.const import DATA_DISCOVERED_HOSTS, DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_SSDP, SOURCE_USER +from homeassistant.components.heos.const import DOMAIN +from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -17,18 +13,20 @@ from homeassistant.data_entry_flow import FlowResultType async def test_flow_aborts_already_setup(hass: HomeAssistant, config_entry) -> None: """Test flow aborts when entry already setup.""" config_entry.add_to_hass(hass) - flow = HeosFlowHandler() - flow.hass = hass - result = await flow.async_step_user() + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" async def test_no_host_shows_form(hass: HomeAssistant) -> None: """Test form is shown when host not provided.""" - flow = HeosFlowHandler() - flow.hass = hass - result = await flow.async_step_user() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {} @@ -45,73 +43,69 @@ async def test_cannot_connect_shows_error_form(hass: HomeAssistant, controller) assert result["errors"][CONF_HOST] == "cannot_connect" assert controller.connect.call_count == 1 assert controller.disconnect.call_count == 1 - controller.connect.reset_mock() - controller.disconnect.reset_mock() async def test_create_entry_when_host_valid(hass: HomeAssistant, controller) -> None: """Test result type is create entry when host is valid.""" data = 
{CONF_HOST: "127.0.0.1"} - with patch("homeassistant.components.heos.async_setup_entry", return_value=True): - result = await hass.config_entries.flow.async_init( - heos.DOMAIN, context={"source": SOURCE_USER}, data=data - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].unique_id == DOMAIN - assert result["title"] == "Controller (127.0.0.1)" - assert result["data"] == data - assert controller.connect.call_count == 1 - assert controller.disconnect.call_count == 1 + + result = await hass.config_entries.flow.async_init( + heos.DOMAIN, context={"source": SOURCE_USER}, data=data + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == DOMAIN + assert result["title"] == "Controller (127.0.0.1)" + assert result["data"] == data + assert controller.connect.call_count == 2 # Also called in async_setup_entry + assert controller.disconnect.call_count == 1 async def test_create_entry_when_friendly_name_valid( hass: HomeAssistant, controller ) -> None: """Test result type is create entry when friendly name is valid.""" - hass.data[DATA_DISCOVERED_HOSTS] = {"Office (127.0.0.1)": "127.0.0.1"} + hass.data[DOMAIN] = {"Office (127.0.0.1)": "127.0.0.1"} data = {CONF_HOST: "Office (127.0.0.1)"} - with patch("homeassistant.components.heos.async_setup_entry", return_value=True): - result = await hass.config_entries.flow.async_init( - heos.DOMAIN, context={"source": SOURCE_USER}, data=data - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].unique_id == DOMAIN - assert result["title"] == "Controller (127.0.0.1)" - assert result["data"] == {CONF_HOST: "127.0.0.1"} - assert controller.connect.call_count == 1 - assert controller.disconnect.call_count == 1 - assert DATA_DISCOVERED_HOSTS not in hass.data + + result = await hass.config_entries.flow.async_init( + heos.DOMAIN, context={"source": SOURCE_USER}, data=data + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert 
result["result"].unique_id == DOMAIN + assert result["title"] == "Controller (127.0.0.1)" + assert result["data"] == {CONF_HOST: "127.0.0.1"} + assert controller.connect.call_count == 2 # Also called in async_setup_entry + assert controller.disconnect.call_count == 1 + assert DOMAIN not in hass.data async def test_discovery_shows_create_form( - hass: HomeAssistant, controller, discovery_data: ssdp.SsdpServiceInfo + hass: HomeAssistant, + controller, + discovery_data: ssdp.SsdpServiceInfo, + discovery_data_bedroom: ssdp.SsdpServiceInfo, ) -> None: - """Test discovery shows form to confirm setup and subsequent abort.""" + """Test discovery shows form to confirm setup.""" - await hass.config_entries.flow.async_init( + # Single discovered host shows form for user to finish setup. + result = await hass.config_entries.flow.async_init( heos.DOMAIN, context={"source": SOURCE_SSDP}, data=discovery_data ) - await hass.async_block_till_done() - flows_in_progress = hass.config_entries.flow.async_progress() - assert flows_in_progress[0]["context"]["unique_id"] == DOMAIN - assert len(flows_in_progress) == 1 - assert hass.data[DATA_DISCOVERED_HOSTS] == {"Office (127.0.0.1)": "127.0.0.1"} + assert hass.data[DOMAIN] == {"Office (127.0.0.1)": "127.0.0.1"} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - port = urlparse(discovery_data.ssdp_location).port - discovery_data.ssdp_location = f"http://127.0.0.2:{port}/" - discovery_data.upnp[ssdp.ATTR_UPNP_FRIENDLY_NAME] = "Bedroom" - - await hass.config_entries.flow.async_init( - heos.DOMAIN, context={"source": SOURCE_SSDP}, data=discovery_data + # Subsequent discovered hosts append to discovered hosts and abort. 
+ result = await hass.config_entries.flow.async_init( + heos.DOMAIN, context={"source": SOURCE_SSDP}, data=discovery_data_bedroom ) - await hass.async_block_till_done() - flows_in_progress = hass.config_entries.flow.async_progress() - assert flows_in_progress[0]["context"]["unique_id"] == DOMAIN - assert len(flows_in_progress) == 1 - assert hass.data[DATA_DISCOVERED_HOSTS] == { + assert hass.data[DOMAIN] == { "Office (127.0.0.1)": "127.0.0.1", "Bedroom (127.0.0.2)": "127.0.0.2", } + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_in_progress" async def test_discovery_flow_aborts_already_setup( @@ -119,41 +113,10 @@ async def test_discovery_flow_aborts_already_setup( ) -> None: """Test discovery flow aborts when entry already setup.""" config_entry.add_to_hass(hass) - flow = HeosFlowHandler() - flow.hass = hass - result = await flow.async_step_ssdp(discovery_data) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_SSDP}, data=discovery_data + ) + assert result["type"] is FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" - - -async def test_discovery_sets_the_unique_id( - hass: HomeAssistant, controller, discovery_data: ssdp.SsdpServiceInfo -) -> None: - """Test discovery sets the unique id.""" - - port = urlparse(discovery_data.ssdp_location).port - discovery_data.ssdp_location = f"http://127.0.0.2:{port}/" - discovery_data.upnp[ssdp.ATTR_UPNP_FRIENDLY_NAME] = "Bedroom" - - await hass.config_entries.flow.async_init( - heos.DOMAIN, context={"source": SOURCE_SSDP}, data=discovery_data - ) - await hass.async_block_till_done() - flows_in_progress = hass.config_entries.flow.async_progress() - assert flows_in_progress[0]["context"]["unique_id"] == DOMAIN - assert len(flows_in_progress) == 1 - assert hass.data[DATA_DISCOVERED_HOSTS] == {"Bedroom (127.0.0.2)": "127.0.0.2"} - - -async def test_import_sets_the_unique_id(hass: HomeAssistant, controller) -> None: - """Test 
import sets the unique id.""" - - with patch("homeassistant.components.heos.async_setup_entry", return_value=True): - result = await hass.config_entries.flow.async_init( - heos.DOMAIN, - context={"source": SOURCE_IMPORT}, - data={CONF_HOST: "127.0.0.2"}, - ) - await hass.async_block_till_done() - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["result"].unique_id == DOMAIN diff --git a/tests/components/heos/test_init.py b/tests/components/heos/test_init.py index 04b745135d4..8d2e3b68a22 100644 --- a/tests/components/heos/test_init.py +++ b/tests/components/heos/test_init.py @@ -13,40 +13,11 @@ from homeassistant.components.heos import ( async_unload_entry, ) from homeassistant.components.heos.const import DOMAIN -from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.setup import async_setup_component -async def test_async_setup_creates_entry(hass: HomeAssistant, config) -> None: - """Test component setup creates entry from config.""" - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - entry = entries[0] - assert entry.title == "Controller (127.0.0.1)" - assert entry.data == {CONF_HOST: "127.0.0.1"} - assert entry.unique_id == DOMAIN - - -async def test_async_setup_updates_entry( - hass: HomeAssistant, config_entry, config, controller -) -> None: - """Test component setup updates entry from config.""" - config[DOMAIN][CONF_HOST] = "127.0.0.2" - config_entry.add_to_hass(hass) - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - entry = entries[0] - assert entry.title == "Controller (127.0.0.2)" - assert entry.data == {CONF_HOST: "127.0.0.2"} - assert entry.unique_id == DOMAIN - - async def 
test_async_setup_returns_true( hass: HomeAssistant, config_entry, config ) -> None: From 3d93561e0a69c149a6f000882e82fd1e1422d0d6 Mon Sep 17 00:00:00 2001 From: Jan Rieger <271149+jrieger@users.noreply.github.com> Date: Fri, 13 Dec 2024 09:47:39 +0100 Subject: [PATCH 182/677] Remove `native_unit_of_measurement` from rfxtrx counters (#133108) --- homeassistant/components/rfxtrx/sensor.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/homeassistant/components/rfxtrx/sensor.py b/homeassistant/components/rfxtrx/sensor.py index cc195c9944e..4f8ae9767e2 100644 --- a/homeassistant/components/rfxtrx/sensor.py +++ b/homeassistant/components/rfxtrx/sensor.py @@ -182,13 +182,11 @@ SENSOR_TYPES = ( key="Count", translation_key="count", state_class=SensorStateClass.TOTAL_INCREASING, - native_unit_of_measurement="count", ), RfxtrxSensorEntityDescription( key="Counter value", translation_key="counter_value", state_class=SensorStateClass.TOTAL_INCREASING, - native_unit_of_measurement="count", ), RfxtrxSensorEntityDescription( key="Chill", From f7b6f4b9274619a6bb97da8b93b63f4cbdbd388c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 09:48:24 +0100 Subject: [PATCH 183/677] Replace functools.partial with ServiceCall.hass in knx (#133111) --- homeassistant/components/knx/services.py | 37 +++++++++++------------- 1 file changed, 17 insertions(+), 20 deletions(-) diff --git a/homeassistant/components/knx/services.py b/homeassistant/components/knx/services.py index 113be9709ee..6c392902737 100644 --- a/homeassistant/components/knx/services.py +++ b/homeassistant/components/knx/services.py @@ -2,7 +2,6 @@ from __future__ import annotations -from functools import partial import logging from typing import TYPE_CHECKING @@ -47,14 +46,14 @@ def register_knx_services(hass: HomeAssistant) -> None: hass.services.async_register( DOMAIN, SERVICE_KNX_SEND, - partial(service_send_to_knx_bus, hass), + service_send_to_knx_bus, 
schema=SERVICE_KNX_SEND_SCHEMA, ) hass.services.async_register( DOMAIN, SERVICE_KNX_READ, - partial(service_read_to_knx_bus, hass), + service_read_to_knx_bus, schema=SERVICE_KNX_READ_SCHEMA, ) @@ -62,7 +61,7 @@ def register_knx_services(hass: HomeAssistant) -> None: hass, DOMAIN, SERVICE_KNX_EVENT_REGISTER, - partial(service_event_register_modify, hass), + service_event_register_modify, schema=SERVICE_KNX_EVENT_REGISTER_SCHEMA, ) @@ -70,7 +69,7 @@ def register_knx_services(hass: HomeAssistant) -> None: hass, DOMAIN, SERVICE_KNX_EXPOSURE_REGISTER, - partial(service_exposure_register_modify, hass), + service_exposure_register_modify, schema=SERVICE_KNX_EXPOSURE_REGISTER_SCHEMA, ) @@ -78,7 +77,7 @@ def register_knx_services(hass: HomeAssistant) -> None: hass, DOMAIN, SERVICE_RELOAD, - partial(service_reload_integration, hass), + service_reload_integration, ) @@ -103,9 +102,9 @@ SERVICE_KNX_EVENT_REGISTER_SCHEMA = vol.Schema( ) -async def service_event_register_modify(hass: HomeAssistant, call: ServiceCall) -> None: +async def service_event_register_modify(call: ServiceCall) -> None: """Service for adding or removing a GroupAddress to the knx_event filter.""" - knx_module = get_knx_module(hass) + knx_module = get_knx_module(call.hass) attr_address = call.data[KNX_ADDRESS] group_addresses = list(map(parse_device_group_address, attr_address)) @@ -156,11 +155,9 @@ SERVICE_KNX_EXPOSURE_REGISTER_SCHEMA = vol.Any( ) -async def service_exposure_register_modify( - hass: HomeAssistant, call: ServiceCall -) -> None: +async def service_exposure_register_modify(call: ServiceCall) -> None: """Service for adding or removing an exposure to KNX bus.""" - knx_module = get_knx_module(hass) + knx_module = get_knx_module(call.hass) group_address = call.data[KNX_ADDRESS] @@ -223,9 +220,9 @@ SERVICE_KNX_SEND_SCHEMA = vol.Any( ) -async def service_send_to_knx_bus(hass: HomeAssistant, call: ServiceCall) -> None: +async def service_send_to_knx_bus(call: ServiceCall) -> None: """Service for 
sending an arbitrary KNX message to the KNX bus.""" - knx_module = get_knx_module(hass) + knx_module = get_knx_module(call.hass) attr_address = call.data[KNX_ADDRESS] attr_payload = call.data[SERVICE_KNX_ATTR_PAYLOAD] @@ -271,9 +268,9 @@ SERVICE_KNX_READ_SCHEMA = vol.Schema( ) -async def service_read_to_knx_bus(hass: HomeAssistant, call: ServiceCall) -> None: +async def service_read_to_knx_bus(call: ServiceCall) -> None: """Service for sending a GroupValueRead telegram to the KNX bus.""" - knx_module = get_knx_module(hass) + knx_module = get_knx_module(call.hass) for address in call.data[KNX_ADDRESS]: telegram = Telegram( @@ -284,8 +281,8 @@ async def service_read_to_knx_bus(hass: HomeAssistant, call: ServiceCall) -> Non await knx_module.xknx.telegrams.put(telegram) -async def service_reload_integration(hass: HomeAssistant, call: ServiceCall) -> None: +async def service_reload_integration(call: ServiceCall) -> None: """Reload the integration.""" - knx_module = get_knx_module(hass) - await hass.config_entries.async_reload(knx_module.entry.entry_id) - hass.bus.async_fire(f"event_{DOMAIN}_reloaded", context=call.context) + knx_module = get_knx_module(call.hass) + await call.hass.config_entries.async_reload(knx_module.entry.entry_id) + call.hass.bus.async_fire(f"event_{DOMAIN}_reloaded", context=call.context) From 8b579d83ce32859fb054013254645571ba3c9461 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 09:50:10 +0100 Subject: [PATCH 184/677] Add data/data_description translation checks (#131705) --- tests/components/conftest.py | 38 ++++++++++++++++++++++ tests/components/onkyo/test_config_flow.py | 9 +++++ 2 files changed, 47 insertions(+) diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 71c3b14050d..ac30d105299 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -4,6 +4,7 @@ from __future__ import annotations import asyncio from collections.abc import 
AsyncGenerator, Callable, Generator +from functools import lru_cache from importlib.util import find_spec from pathlib import Path import string @@ -37,6 +38,7 @@ from homeassistant.data_entry_flow import ( from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.translation import async_get_translations +from homeassistant.util import yaml if TYPE_CHECKING: from homeassistant.components.hassio import AddonManager @@ -619,6 +621,26 @@ def ignore_translations() -> str | list[str]: return [] +@lru_cache +def _get_integration_quality_scale(integration: str) -> dict[str, Any]: + """Get the quality scale for an integration.""" + try: + return yaml.load_yaml_dict( + f"homeassistant/components/{integration}/quality_scale.yaml" + ).get("rules", {}) + except FileNotFoundError: + return {} + + +def _get_integration_quality_scale_rule(integration: str, rule: str) -> str: + """Get the quality scale for an integration.""" + quality_scale = _get_integration_quality_scale(integration) + if not quality_scale or rule not in quality_scale: + return "todo" + status = quality_scale[rule] + return status if isinstance(status, str) else status["status"] + + async def _check_config_flow_result_translations( manager: FlowManager, flow: FlowHandler, @@ -650,6 +672,9 @@ async def _check_config_flow_result_translations( setattr(flow, "__flow_seen_before", hasattr(flow, "__flow_seen_before")) if result["type"] is FlowResultType.FORM: + iqs_config_flow = _get_integration_quality_scale_rule( + integration, "config-flow" + ) if step_id := result.get("step_id"): # neither title nor description are required # - title defaults to integration name @@ -664,6 +689,19 @@ async def _check_config_flow_result_translations( result["description_placeholders"], translation_required=False, ) + if iqs_config_flow == "done" and (data_schema := result["data_schema"]): + # data and data_description are compulsory + for data_key in 
data_schema.schema: + for header in ("data", "data_description"): + await _validate_translation( + flow.hass, + translation_errors, + category, + integration, + f"{key_prefix}step.{step_id}.{header}.{data_key}", + result["description_placeholders"], + ) + if errors := result.get("errors"): for error in errors.values(): await _validate_translation( diff --git a/tests/components/onkyo/test_config_flow.py b/tests/components/onkyo/test_config_flow.py index f230ab124bd..a9d6f072559 100644 --- a/tests/components/onkyo/test_config_flow.py +++ b/tests/components/onkyo/test_config_flow.py @@ -503,6 +503,15 @@ async def test_import_success( } +@pytest.mark.parametrize( + "ignore_translations", + [ + [ # The schema is dynamically created from input sources + "component.onkyo.options.step.init.data.TV", + "component.onkyo.options.step.init.data_description.TV", + ] + ], +) async def test_options_flow(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test options flow.""" From 8cde40499768bfb3c17a63f143296d8fdbab5c0d Mon Sep 17 00:00:00 2001 From: G Johansson Date: Fri, 13 Dec 2024 10:05:46 +0100 Subject: [PATCH 185/677] Raise issue for deprecated imperial unit system (#130979) --- .../components/homeassistant/strings.json | 4 +++ homeassistant/core_config.py | 31 +++++++++++++++-- homeassistant/util/unit_system.py | 1 - tests/test_core_config.py | 24 +++++++++++++ tests/util/test_unit_system.py | 34 +++++++++++++++++++ 5 files changed, 91 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/homeassistant/strings.json b/homeassistant/components/homeassistant/strings.json index 52b330bfbc8..3283d480fdd 100644 --- a/homeassistant/components/homeassistant/strings.json +++ b/homeassistant/components/homeassistant/strings.json @@ -10,6 +10,10 @@ "title": "The country has not been configured", "description": "No country has been configured, please update the configuration by clicking on the \"learn more\" button below." 
}, + "imperial_unit_system": { + "title": "The imperial unit system is deprecated", + "description": "The imperial unit system is deprecated and your system is currently using us customary. Please update your configuration to use the us customary unit system and reload the core configuration to fix this issue." + }, "deprecated_yaml": { "title": "The {integration_title} YAML configuration is being removed", "description": "Configuring {integration_title} using YAML is being removed.\n\nYour existing YAML configuration has been imported into the UI automatically.\n\nRemove the `{domain}` configuration from your configuration.yaml file and restart Home Assistant to fix this issue." diff --git a/homeassistant/core_config.py b/homeassistant/core_config.py index 430a882ecb9..38ca07e8f31 100644 --- a/homeassistant/core_config.py +++ b/homeassistant/core_config.py @@ -68,11 +68,11 @@ from .util.hass_dict import HassKey from .util.package import is_docker_env from .util.unit_system import ( _CONF_UNIT_SYSTEM_IMPERIAL, + _CONF_UNIT_SYSTEM_METRIC, _CONF_UNIT_SYSTEM_US_CUSTOMARY, METRIC_SYSTEM, UnitSystem, get_unit_system, - validate_unit_system, ) # Typing imports that create a circular dependency @@ -188,6 +188,26 @@ _CUSTOMIZE_CONFIG_SCHEMA = vol.Schema( ) +def _raise_issue_if_imperial_unit_system( + hass: HomeAssistant, config: dict[str, Any] +) -> dict[str, Any]: + if config.get(CONF_UNIT_SYSTEM) == _CONF_UNIT_SYSTEM_IMPERIAL: + ir.async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + "imperial_unit_system", + is_fixable=False, + learn_more_url="homeassistant://config/general", + severity=ir.IssueSeverity.WARNING, + translation_key="imperial_unit_system", + ) + config[CONF_UNIT_SYSTEM] = _CONF_UNIT_SYSTEM_US_CUSTOMARY + else: + ir.async_delete_issue(hass, HOMEASSISTANT_DOMAIN, "imperial_unit_system") + + return config + + def _raise_issue_if_historic_currency(hass: HomeAssistant, currency: str) -> None: if currency not in HISTORIC_CURRENCIES: ir.async_delete_issue(hass, 
HOMEASSISTANT_DOMAIN, "historic_currency") @@ -249,7 +269,11 @@ CORE_CONFIG_SCHEMA = vol.All( CONF_ELEVATION: vol.Coerce(int), CONF_RADIUS: cv.positive_int, vol.Remove(CONF_TEMPERATURE_UNIT): cv.temperature_unit, - CONF_UNIT_SYSTEM: validate_unit_system, + CONF_UNIT_SYSTEM: vol.Any( + _CONF_UNIT_SYSTEM_METRIC, + _CONF_UNIT_SYSTEM_US_CUSTOMARY, + _CONF_UNIT_SYSTEM_IMPERIAL, + ), CONF_TIME_ZONE: cv.time_zone, vol.Optional(CONF_INTERNAL_URL): cv.url, vol.Optional(CONF_EXTERNAL_URL): cv.url, @@ -333,6 +357,9 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> Non # so we need to run it in an executor job. config = await hass.async_add_executor_job(CORE_CONFIG_SCHEMA, config) + # Check if we need to raise an issue for imperial unit system + config = _raise_issue_if_imperial_unit_system(hass, config) + # Only load auth during startup. if not hasattr(hass, "auth"): if (auth_conf := config.get(CONF_AUTH_PROVIDERS)) is None: diff --git a/homeassistant/util/unit_system.py b/homeassistant/util/unit_system.py index c812dd38230..15993cbae47 100644 --- a/homeassistant/util/unit_system.py +++ b/homeassistant/util/unit_system.py @@ -233,7 +233,6 @@ def _deprecated_unit_system(value: str) -> str: """Convert deprecated unit system.""" if value == _CONF_UNIT_SYSTEM_IMPERIAL: - # need to add warning in 2023.1 return _CONF_UNIT_SYSTEM_US_CUSTOMARY return value diff --git a/tests/test_core_config.py b/tests/test_core_config.py index cd77e3608dd..dae50bae097 100644 --- a/tests/test_core_config.py +++ b/tests/test_core_config.py @@ -1080,3 +1080,27 @@ async def test_set_time_zone_deprecated(hass: HomeAssistant) -> None: ), ): await hass.config.set_time_zone("America/New_York") + + +async def test_core_config_schema_imperial_unit( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test core config schema.""" + await async_process_ha_core_config( + hass, + { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Home", + 
"unit_system": "imperial", + "time_zone": "America/New_York", + "currency": "USD", + "country": "US", + "language": "en", + "radius": 150, + }, + ) + + issue = issue_registry.async_get_issue("homeassistant", "imperial_unit_system") + assert issue diff --git a/tests/util/test_unit_system.py b/tests/util/test_unit_system.py index b2c604acbcf..ddefe92de42 100644 --- a/tests/util/test_unit_system.py +++ b/tests/util/test_unit_system.py @@ -24,6 +24,8 @@ from homeassistant.const import ( UnitOfVolume, UnitOfVolumetricFlux, ) +from homeassistant.core import HomeAssistant +from homeassistant.core_config import async_process_ha_core_config from homeassistant.exceptions import HomeAssistantError from homeassistant.util.unit_system import ( # pylint: disable=hass-deprecated-import _CONF_UNIT_SYSTEM_IMPERIAL, @@ -877,3 +879,35 @@ def test_imperial_converted_units(device_class: SensorDeviceClass) -> None: assert (device_class, unit) not in unit_system._conversions continue assert (device_class, unit) in unit_system._conversions + + +async def test_imperial_deprecated_log_warning( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test deprecated imperial unit system logs warning.""" + await async_process_ha_core_config( + hass, + { + "latitude": 60, + "longitude": 50, + "elevation": 25, + "name": "Home", + "unit_system": "imperial", + "time_zone": "America/New_York", + "currency": "USD", + "country": "US", + "language": "en", + "radius": 150, + }, + ) + + assert hass.config.latitude == 60 + assert hass.config.longitude == 50 + assert hass.config.elevation == 25 + assert hass.config.location_name == "Home" + assert hass.config.units is US_CUSTOMARY_SYSTEM + assert hass.config.time_zone == "America/New_York" + assert hass.config.currency == "USD" + assert hass.config.country == "US" + assert hass.config.language == "en" + assert hass.config.radius == 150 From fb5cca877bead93f5313757578563743c2ed028f Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 13 
Dec 2024 10:12:35 +0100 Subject: [PATCH 186/677] Fix failing CI due to Russound Rio incorrect IQS (#133118) --- homeassistant/components/russound_rio/quality_scale.yaml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/russound_rio/quality_scale.yaml b/homeassistant/components/russound_rio/quality_scale.yaml index 2d396892aa8..3a5e8f9adb7 100644 --- a/homeassistant/components/russound_rio/quality_scale.yaml +++ b/homeassistant/components/russound_rio/quality_scale.yaml @@ -11,7 +11,10 @@ rules: brands: done common-modules: done config-flow-test-coverage: done - config-flow: done + config-flow: + status: todo + comment: | + The data_description fields in translations are missing. dependency-transparency: done docs-actions: status: exempt From c0ef60bb98cbde57715a4edfa7dc47d9d168aedd Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 13 Dec 2024 10:22:46 +0100 Subject: [PATCH 187/677] Bump aiowithings to 3.1.4 (#133117) --- homeassistant/components/withings/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/withings/manifest.json b/homeassistant/components/withings/manifest.json index 57d4bafdc7b..886eb66f5e0 100644 --- a/homeassistant/components/withings/manifest.json +++ b/homeassistant/components/withings/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/withings", "iot_class": "cloud_push", "loggers": ["aiowithings"], - "requirements": ["aiowithings==3.1.3"] + "requirements": ["aiowithings==3.1.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index cc715c895f9..66dfa359577 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -417,7 +417,7 @@ aiowatttime==0.1.1 aiowebostv==0.4.2 # homeassistant.components.withings -aiowithings==3.1.3 +aiowithings==3.1.4 # homeassistant.components.yandex_transport aioymaps==1.2.5 diff --git 
a/requirements_test_all.txt b/requirements_test_all.txt index 7094270a7a6..5e0705b7358 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -399,7 +399,7 @@ aiowatttime==0.1.1 aiowebostv==0.4.2 # homeassistant.components.withings -aiowithings==3.1.3 +aiowithings==3.1.4 # homeassistant.components.yandex_transport aioymaps==1.2.5 From 7f3373d2337560e8fea4524bcd5140cbd53a88d0 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Fri, 13 Dec 2024 01:27:35 -0800 Subject: [PATCH 188/677] Add a quality scale for Google Tasks (#131497) Co-authored-by: Joost Lekkerkerker --- .../google_tasks/quality_scale.yaml | 78 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 78 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/google_tasks/quality_scale.yaml diff --git a/homeassistant/components/google_tasks/quality_scale.yaml b/homeassistant/components/google_tasks/quality_scale.yaml new file mode 100644 index 00000000000..b4159b30145 --- /dev/null +++ b/homeassistant/components/google_tasks/quality_scale.yaml @@ -0,0 +1,78 @@ +rules: + # Bronze + config-flow: done + brands: done + dependency-transparency: todo + common-modules: + status: exempt + comment: | + The integration has a coordinator.py and no base entities. + has-entity-name: done + action-setup: + status: exempt + comment: The integration does not register any actions. + appropriate-polling: done + test-before-configure: done + entity-event-setup: + status: exempt + comment: Integration does not subscribe to events. + unique-config-entry: done + entity-unique-id: done + docs-installation-instructions: done + docs-removal-instructions: todo + test-before-setup: + status: todo + comment: | + The integration refreshes the access token, but does not poll the API. The + setup can be changed to request the list of todo lists in setup instead + of during platform setup. 
+ docs-high-level-description: done + config-flow-test-coverage: done + docs-actions: + status: exempt + comment: The integration does not register any actions. + runtime-data: done + + # Silver + log-when-unavailable: done + config-entry-unloading: done + reauthentication-flow: + status: todo + comment: Missing a test that reauthenticates with the wrong account + action-exceptions: todo + docs-installation-parameters: todo + integration-owner: done + parallel-updates: todo + test-coverage: + status: todo + comment: Test coverage for __init__.py is not above 95% yet + docs-configuration-parameters: todo + entity-unavailable: done + + # Gold + docs-examples: todo + discovery-update-info: todo + entity-device-class: todo + entity-translations: todo + docs-data-update: todo + entity-disabled-by-default: todo + discovery: todo + exception-translations: todo + devices: todo + docs-supported-devices: todo + icon-translations: todo + docs-known-limitations: todo + stale-devices: todo + docs-supported-functions: todo + repair-issues: todo + reconfiguration-flow: todo + entity-category: todo + dynamic-devices: todo + docs-troubleshooting: todo + diagnostics: todo + docs-use-cases: todo + + # Platinum + async-dependency: todo + strict-typing: todo + inject-websession: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index f3b285c8485..23721d31fec 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -457,7 +457,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "google_maps", "google_pubsub", "google_sheets", - "google_tasks", "google_translate", "google_travel_time", "google_wifi", From 91f7afc2c5fb9aa4a91fd8d5141838da5792d805 Mon Sep 17 00:00:00 2001 From: Cyrill Raccaud Date: Fri, 13 Dec 2024 10:40:23 +0100 Subject: [PATCH 189/677] Cookidoo reauth config flow for silver (#133110) * reauth * add check for duplicate email in reauth * fix reauth double email check * parametrize tests * check reauth double 
entry data as well --- .../components/cookidoo/config_flow.py | 34 +++++ .../components/cookidoo/coordinator.py | 2 +- .../components/cookidoo/manifest.json | 2 +- .../components/cookidoo/quality_scale.yaml | 2 +- .../components/cookidoo/strings.json | 12 ++ tests/components/cookidoo/test_config_flow.py | 124 ++++++++++++++++++ tests/components/cookidoo/test_init.py | 2 +- 7 files changed, 174 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/cookidoo/config_flow.py b/homeassistant/components/cookidoo/config_flow.py index ce7ad9fde87..d523de96b01 100644 --- a/homeassistant/components/cookidoo/config_flow.py +++ b/homeassistant/components/cookidoo/config_flow.py @@ -102,6 +102,40 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Perform reauth upon an API authentication error.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Dialog that informs the user that reauth is required.""" + errors: dict[str, str] = {} + + reauth_entry = self._get_reauth_entry() + + if user_input is not None: + if not ( + errors := await self.validate_input({**reauth_entry.data, **user_input}) + ): + if user_input[CONF_EMAIL] != reauth_entry.data[CONF_EMAIL]: + self._async_abort_entries_match( + {CONF_EMAIL: user_input[CONF_EMAIL]} + ) + return self.async_update_reload_and_abort( + reauth_entry, data_updates=user_input + ) + return self.async_show_form( + step_id="reauth_confirm", + data_schema=self.add_suggested_values_to_schema( + data_schema=vol.Schema(AUTH_DATA_SCHEMA), + suggested_values={CONF_EMAIL: reauth_entry.data[CONF_EMAIL]}, + ), + errors=errors, + ) + async def generate_country_schema(self) -> None: """Generate country schema.""" self.COUNTRY_DATA_SCHEMA = { diff --git a/homeassistant/components/cookidoo/coordinator.py 
b/homeassistant/components/cookidoo/coordinator.py index 23a133ea16f..ad86d1fb9f1 100644 --- a/homeassistant/components/cookidoo/coordinator.py +++ b/homeassistant/components/cookidoo/coordinator.py @@ -63,7 +63,7 @@ class CookidooDataUpdateCoordinator(DataUpdateCoordinator[CookidooData]): translation_key="setup_request_exception", ) from e except CookidooAuthException as e: - raise UpdateFailed( + raise ConfigEntryAuthFailed( translation_domain=DOMAIN, translation_key="setup_authentication_exception", translation_placeholders={ diff --git a/homeassistant/components/cookidoo/manifest.json b/homeassistant/components/cookidoo/manifest.json index 7e9e86f9d9d..59d58200fdf 100644 --- a/homeassistant/components/cookidoo/manifest.json +++ b/homeassistant/components/cookidoo/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/cookidoo", "integration_type": "service", "iot_class": "cloud_polling", - "quality_scale": "bronze", + "quality_scale": "silver", "requirements": ["cookidoo-api==0.10.0"] } diff --git a/homeassistant/components/cookidoo/quality_scale.yaml b/homeassistant/components/cookidoo/quality_scale.yaml index 7b2bbb7592b..25069c87c46 100644 --- a/homeassistant/components/cookidoo/quality_scale.yaml +++ b/homeassistant/components/cookidoo/quality_scale.yaml @@ -38,7 +38,7 @@ rules: action-exceptions: status: done comment: Only providing todo actions - reauthentication-flow: todo + reauthentication-flow: done parallel-updates: done test-coverage: done integration-owner: done diff --git a/homeassistant/components/cookidoo/strings.json b/homeassistant/components/cookidoo/strings.json index 2c518f472d5..126205fcf2f 100644 --- a/homeassistant/components/cookidoo/strings.json +++ b/homeassistant/components/cookidoo/strings.json @@ -22,6 +22,18 @@ "data_description": { "language": "Pick your language for the Cookidoo content." 
} + }, + "reauth_confirm": { + "title": "Login again to Cookidoo", + "description": "Please log in to Cookidoo again to continue using this integration.", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "email": "[%key:component::cookidoo::config::step::user::data_description::email%]", + "password": "[%key:component::cookidoo::config::step::user::data_description::password%]" + } } }, "error": { diff --git a/tests/components/cookidoo/test_config_flow.py b/tests/components/cookidoo/test_config_flow.py index 0da8afe7d07..cfdc284dbfe 100644 --- a/tests/components/cookidoo/test_config_flow.py +++ b/tests/components/cookidoo/test_config_flow.py @@ -180,3 +180,127 @@ async def test_flow_user_init_data_already_configured( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_flow_reauth( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + cookidoo_config_entry: MockConfigEntry, +) -> None: + """Test reauth flow.""" + + cookidoo_config_entry.add_to_hass(hass) + + result = await cookidoo_config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "new-email", CONF_PASSWORD: "new-password"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert cookidoo_config_entry.data == { + CONF_EMAIL: "new-email", + CONF_PASSWORD: "new-password", + CONF_COUNTRY: COUNTRY, + CONF_LANGUAGE: LANGUAGE, + } + assert len(hass.config_entries.async_entries()) == 1 + + +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (CookidooRequestException(), "cannot_connect"), + (CookidooAuthException(), "invalid_auth"), + (CookidooException(), "unknown"), + (IndexError(), "unknown"), + ], +) 
+async def test_flow_reauth_error_and_recover( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + cookidoo_config_entry: MockConfigEntry, + raise_error, + text_error, +) -> None: + """Test reauth flow.""" + + cookidoo_config_entry.add_to_hass(hass) + + result = await cookidoo_config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + mock_cookidoo_client.login.side_effect = raise_error + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "new-email", CONF_PASSWORD: "new-password"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": text_error} + + mock_cookidoo_client.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "new-email", CONF_PASSWORD: "new-password"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert cookidoo_config_entry.data == { + CONF_EMAIL: "new-email", + CONF_PASSWORD: "new-password", + CONF_COUNTRY: COUNTRY, + CONF_LANGUAGE: LANGUAGE, + } + assert len(hass.config_entries.async_entries()) == 1 + + +@pytest.mark.parametrize( + ("new_email", "saved_email", "result_reason"), + [ + (EMAIL, EMAIL, "reauth_successful"), + ("another-email", EMAIL, "already_configured"), + ], +) +async def test_flow_reauth_init_data_already_configured( + hass: HomeAssistant, + mock_cookidoo_client: AsyncMock, + cookidoo_config_entry: MockConfigEntry, + new_email: str, + saved_email: str, + result_reason: str, +) -> None: + """Test we abort user data set when entry is already configured.""" + + cookidoo_config_entry.add_to_hass(hass) + + another_cookidoo_config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_EMAIL: "another-email", + CONF_PASSWORD: PASSWORD, + CONF_COUNTRY: COUNTRY, + CONF_LANGUAGE: LANGUAGE, + }, + ) + + another_cookidoo_config_entry.add_to_hass(hass) + 
+ result = await cookidoo_config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: new_email, CONF_PASSWORD: PASSWORD}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == result_reason + assert cookidoo_config_entry.data[CONF_EMAIL] == saved_email diff --git a/tests/components/cookidoo/test_init.py b/tests/components/cookidoo/test_init.py index c73295bcd96..b1b9b880526 100644 --- a/tests/components/cookidoo/test_init.py +++ b/tests/components/cookidoo/test_init.py @@ -35,7 +35,7 @@ async def test_load_unload( ("exception", "status"), [ (CookidooRequestException, ConfigEntryState.SETUP_RETRY), - (CookidooAuthException, ConfigEntryState.SETUP_RETRY), + (CookidooAuthException, ConfigEntryState.SETUP_ERROR), ], ) async def test_init_failure( From 4e5ceb3aa4309634aa5d34d4fe5f7417e1ba1025 Mon Sep 17 00:00:00 2001 From: Simon Lamon <32477463+silamon@users.noreply.github.com> Date: Wed, 11 Dec 2024 11:12:05 +0100 Subject: [PATCH 190/677] Bump python-linkplay to v0.1.1 (#132091) --- homeassistant/components/linkplay/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/linkplay/test_diagnostics.py | 6 ++++-- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/linkplay/manifest.json b/homeassistant/components/linkplay/manifest.json index e74d22b8207..cc124ceb611 100644 --- a/homeassistant/components/linkplay/manifest.json +++ b/homeassistant/components/linkplay/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["linkplay"], - "requirements": ["python-linkplay==0.0.20"], + "requirements": ["python-linkplay==0.1.1"], "zeroconf": ["_linkplay._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index a8a7185a22a..aa43af2aacd 100644 
--- a/requirements_all.txt +++ b/requirements_all.txt @@ -2365,7 +2365,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.8.1 # homeassistant.components.linkplay -python-linkplay==0.0.20 +python-linkplay==0.1.1 # homeassistant.components.lirc # python-lirc==1.2.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index adf1c83b236..eb971e7bca2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1892,7 +1892,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.8.1 # homeassistant.components.linkplay -python-linkplay==0.0.20 +python-linkplay==0.1.1 # homeassistant.components.matter python-matter-server==6.6.0 diff --git a/tests/components/linkplay/test_diagnostics.py b/tests/components/linkplay/test_diagnostics.py index 369142978a3..de60b7ecb3a 100644 --- a/tests/components/linkplay/test_diagnostics.py +++ b/tests/components/linkplay/test_diagnostics.py @@ -31,8 +31,10 @@ async def test_diagnostics( patch.object(LinkPlayMultiroom, "update_status", return_value=None), ): endpoints = [ - LinkPlayApiEndpoint(protocol="https", endpoint=HOST, session=None), - LinkPlayApiEndpoint(protocol="http", endpoint=HOST, session=None), + LinkPlayApiEndpoint( + protocol="https", port=443, endpoint=HOST, session=None + ), + LinkPlayApiEndpoint(protocol="http", port=80, endpoint=HOST, session=None), ] for endpoint in endpoints: mock_session.get( From 038115fea2571bcdb2214be5c304881ef11c96ea Mon Sep 17 00:00:00 2001 From: Stefano Angeleri Date: Tue, 10 Dec 2024 18:29:28 +0100 Subject: [PATCH 191/677] Bump pydaikin to 2.13.8 (#132759) --- homeassistant/components/daikin/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/daikin/manifest.json b/homeassistant/components/daikin/manifest.json index f6e9cb78efb..f794d97a9ba 100644 --- a/homeassistant/components/daikin/manifest.json +++ b/homeassistant/components/daikin/manifest.json @@ 
-6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/daikin", "iot_class": "local_polling", "loggers": ["pydaikin"], - "requirements": ["pydaikin==2.13.7"], + "requirements": ["pydaikin==2.13.8"], "zeroconf": ["_dkapi._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index aa43af2aacd..7bf954dd89c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1835,7 +1835,7 @@ pycsspeechtts==1.0.8 # pycups==2.0.4 # homeassistant.components.daikin -pydaikin==2.13.7 +pydaikin==2.13.8 # homeassistant.components.danfoss_air pydanfossair==0.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index eb971e7bca2..8f357072d97 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1485,7 +1485,7 @@ pycountry==24.6.1 pycsspeechtts==1.0.8 # homeassistant.components.daikin -pydaikin==2.13.7 +pydaikin==2.13.8 # homeassistant.components.deako pydeako==0.6.0 From c08ffcff9b2ce458567c2d695d91af1b29c4ecd5 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Wed, 11 Dec 2024 11:52:02 -0600 Subject: [PATCH 192/677] Fix pipeline conversation language (#132896) --- .../components/assist_pipeline/pipeline.py | 12 ++- .../assist_pipeline/snapshots/test_init.ambr | 55 +++++++++++++- tests/components/assist_pipeline/test_init.py | 75 +++++++++++++++++++ .../conversation/test_default_agent.py | 47 ++++++++++++ 4 files changed, 185 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index 9e9e84fb5d6..f8f6be3a40f 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -29,6 +29,7 @@ from homeassistant.components import ( from homeassistant.components.tts import ( generate_media_source_id as tts_generate_media_source_id, ) +from homeassistant.const import MATCH_ALL from homeassistant.core import Context, HomeAssistant, callback from 
homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import intent @@ -1009,12 +1010,19 @@ class PipelineRun: if self.intent_agent is None: raise RuntimeError("Recognize intent was not prepared") + if self.pipeline.conversation_language == MATCH_ALL: + # LLMs support all languages ('*') so use pipeline language for + # intent fallback. + input_language = self.pipeline.language + else: + input_language = self.pipeline.conversation_language + self.process_event( PipelineEvent( PipelineEventType.INTENT_START, { "engine": self.intent_agent, - "language": self.pipeline.conversation_language, + "language": input_language, "intent_input": intent_input, "conversation_id": conversation_id, "device_id": device_id, @@ -1029,7 +1037,7 @@ class PipelineRun: context=self.context, conversation_id=conversation_id, device_id=device_id, - language=self.pipeline.language, + language=input_language, agent_id=self.intent_agent, ) processed_locally = self.intent_agent == conversation.HOME_ASSISTANT_AGENT diff --git a/tests/components/assist_pipeline/snapshots/test_init.ambr b/tests/components/assist_pipeline/snapshots/test_init.ambr index 3b829e0e14a..d3241b8ac1f 100644 --- a/tests/components/assist_pipeline/snapshots/test_init.ambr +++ b/tests/components/assist_pipeline/snapshots/test_init.ambr @@ -142,7 +142,7 @@ 'data': dict({ 'code': 'no_intent_match', }), - 'language': 'en', + 'language': 'en-US', 'response_type': 'error', 'speech': dict({ 'plain': dict({ @@ -233,7 +233,7 @@ 'data': dict({ 'code': 'no_intent_match', }), - 'language': 'en', + 'language': 'en-US', 'response_type': 'error', 'speech': dict({ 'plain': dict({ @@ -387,6 +387,57 @@ }), ]) # --- +# name: test_pipeline_language_used_instead_of_conversation_language + list([ + dict({ + 'data': dict({ + 'language': 'en', + 'pipeline': , + }), + 'type': , + }), + dict({ + 'data': dict({ + 'conversation_id': None, + 'device_id': None, + 'engine': 'conversation.home_assistant', + 'intent_input': 'test 
input', + 'language': 'en', + 'prefer_local_intents': False, + }), + 'type': , + }), + dict({ + 'data': dict({ + 'intent_output': dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + }), + }), + }), + 'processed_locally': True, + }), + 'type': , + }), + dict({ + 'data': None, + 'type': , + }), + ]) +# --- # name: test_wake_word_detection_aborted list([ dict({ diff --git a/tests/components/assist_pipeline/test_init.py b/tests/components/assist_pipeline/test_init.py index b177530219e..a3e65766c34 100644 --- a/tests/components/assist_pipeline/test_init.py +++ b/tests/components/assist_pipeline/test_init.py @@ -23,6 +23,7 @@ from homeassistant.components.assist_pipeline.const import ( CONF_DEBUG_RECORDING_DIR, DOMAIN, ) +from homeassistant.const import MATCH_ALL from homeassistant.core import Context, HomeAssistant from homeassistant.helpers import intent from homeassistant.setup import async_setup_component @@ -1098,3 +1099,77 @@ async def test_prefer_local_intents( ] == "Order confirmed" ) + + +async def test_pipeline_language_used_instead_of_conversation_language( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + init_components, + snapshot: SnapshotAssertion, +) -> None: + """Test that the pipeline language is used when the conversation language is '*' (all languages).""" + client = await hass_ws_client(hass) + + events: list[assist_pipeline.PipelineEvent] = [] + + await client.send_json_auto_id( + { + "type": "assist_pipeline/pipeline/create", + "conversation_engine": "homeassistant", + "conversation_language": MATCH_ALL, + "language": "en", + "name": "test_name", + "stt_engine": "test", + "stt_language": "en-US", + "tts_engine": "test", + "tts_language": "en-US", + "tts_voice": "Arnold Schwarzenegger", + "wake_word_entity": None, + "wake_word_id": None, 
+ } + ) + msg = await client.receive_json() + assert msg["success"] + pipeline_id = msg["result"]["id"] + pipeline = assist_pipeline.async_get_pipeline(hass, pipeline_id) + + pipeline_input = assist_pipeline.pipeline.PipelineInput( + intent_input="test input", + run=assist_pipeline.pipeline.PipelineRun( + hass, + context=Context(), + pipeline=pipeline, + start_stage=assist_pipeline.PipelineStage.INTENT, + end_stage=assist_pipeline.PipelineStage.INTENT, + event_callback=events.append, + ), + ) + await pipeline_input.validate() + + with patch( + "homeassistant.components.assist_pipeline.pipeline.conversation.async_converse", + return_value=conversation.ConversationResult( + intent.IntentResponse(pipeline.language) + ), + ) as mock_async_converse: + await pipeline_input.execute() + + # Check intent start event + assert process_events(events) == snapshot + intent_start: assist_pipeline.PipelineEvent | None = None + for event in events: + if event.type == assist_pipeline.PipelineEventType.INTENT_START: + intent_start = event + break + + assert intent_start is not None + + # Pipeline language (en) should be used instead of '*' + assert intent_start.data.get("language") == pipeline.language + + # Check input to async_converse + mock_async_converse.assert_called_once() + assert ( + mock_async_converse.call_args_list[0].kwargs.get("language") + == pipeline.language + ) diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index 39ecdb7f422..56f25b62f60 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -30,6 +30,7 @@ from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME, STATE_CLOSED, + STATE_OFF, STATE_ON, STATE_UNKNOWN, EntityCategory, @@ -3049,3 +3050,49 @@ async def test_entities_names_are_not_templates(hass: HomeAssistant) -> None: assert result is not None assert result.response.response_type == 
intent.IntentResponseType.ERROR + + +@pytest.mark.parametrize( + ("language", "light_name", "on_sentence", "off_sentence"), + [ + ("en", "test light", "turn on test light", "turn off test light"), + ("zh-cn", "卧室灯", "打开卧室灯", "关闭卧室灯"), + ("zh-hk", "睡房燈", "打開睡房燈", "關閉睡房燈"), + ("zh-tw", "臥室檯燈", "打開臥室檯燈", "關臥室檯燈"), + ], +) +@pytest.mark.usefixtures("init_components") +async def test_turn_on_off( + hass: HomeAssistant, + language: str, + light_name: str, + on_sentence: str, + off_sentence: str, +) -> None: + """Test turn on/off in multiple languages.""" + entity_id = "light.light1234" + hass.states.async_set( + entity_id, STATE_OFF, attributes={ATTR_FRIENDLY_NAME: light_name} + ) + + on_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + await conversation.async_converse( + hass, + on_sentence, + None, + Context(), + language=language, + ) + assert len(on_calls) == 1 + assert on_calls[0].data.get("entity_id") == [entity_id] + + off_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_off") + await conversation.async_converse( + hass, + off_sentence, + None, + Context(), + language=language, + ) + assert len(off_calls) == 1 + assert off_calls[0].data.get("entity_id") == [entity_id] From ede9c3ecd2f1c878587a48cee4650b3e74b59787 Mon Sep 17 00:00:00 2001 From: Simone Chemelli Date: Thu, 12 Dec 2024 05:42:00 -0500 Subject: [PATCH 193/677] fix AndroidTV logging when disconnected (#132919) --- .../components/androidtv/__init__.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/androidtv/__init__.py b/homeassistant/components/androidtv/__init__.py index 34c4212c913..199d1c362dd 100644 --- a/homeassistant/components/androidtv/__init__.py +++ b/homeassistant/components/androidtv/__init__.py @@ -135,15 +135,16 @@ async def async_connect_androidtv( ) aftv = await async_androidtv_setup( - config[CONF_HOST], - config[CONF_PORT], - adbkey, - config.get(CONF_ADB_SERVER_IP), - config.get(CONF_ADB_SERVER_PORT, 
DEFAULT_ADB_SERVER_PORT), - state_detection_rules, - config[CONF_DEVICE_CLASS], - timeout, - signer, + host=config[CONF_HOST], + port=config[CONF_PORT], + adbkey=adbkey, + adb_server_ip=config.get(CONF_ADB_SERVER_IP), + adb_server_port=config.get(CONF_ADB_SERVER_PORT, DEFAULT_ADB_SERVER_PORT), + state_detection_rules=state_detection_rules, + device_class=config[CONF_DEVICE_CLASS], + auth_timeout_s=timeout, + signer=signer, + log_errors=False, ) if not aftv.available: From 83e1353c01a2398ab627a25d647595092815e160 Mon Sep 17 00:00:00 2001 From: Simone Chemelli Date: Wed, 11 Dec 2024 09:40:18 -0500 Subject: [PATCH 194/677] Guard Vodafone Station updates against bad data (#132921) guard Vodafone Station updates against bad data --- homeassistant/components/vodafone_station/coordinator.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/homeassistant/components/vodafone_station/coordinator.py b/homeassistant/components/vodafone_station/coordinator.py index d2f408e355b..e95ca2b5976 100644 --- a/homeassistant/components/vodafone_station/coordinator.py +++ b/homeassistant/components/vodafone_station/coordinator.py @@ -2,6 +2,7 @@ from dataclasses import dataclass from datetime import datetime, timedelta +from json.decoder import JSONDecodeError from typing import Any from aiovodafone import VodafoneStationDevice, VodafoneStationSercommApi, exceptions @@ -107,6 +108,7 @@ class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]): exceptions.CannotConnect, exceptions.AlreadyLogged, exceptions.GenericLoginError, + JSONDecodeError, ) as err: raise UpdateFailed(f"Error fetching data: {err!r}") from err except (ConfigEntryAuthFailed, UpdateFailed): From 31348930cc7b34a74996381616571fa98b7706d9 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 12 Dec 2024 05:46:31 +0100 Subject: [PATCH 195/677] Bump led-ble to 1.1.1 (#132977) changelog: https://github.com/Bluetooth-Devices/led-ble/compare/v1.0.2...v1.1.1 --- homeassistant/components/led_ble/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/led_ble/manifest.json b/homeassistant/components/led_ble/manifest.json index 1d12e355a0d..4aaaebc0006 100644 --- a/homeassistant/components/led_ble/manifest.json +++ b/homeassistant/components/led_ble/manifest.json @@ -35,5 +35,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/led_ble", "iot_class": "local_polling", - "requirements": ["bluetooth-data-tools==1.20.0", "led-ble==1.0.2"] + "requirements": ["bluetooth-data-tools==1.20.0", "led-ble==1.1.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 7bf954dd89c..0e50c50aea4 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1280,7 +1280,7 @@ ld2410-ble==0.1.1 leaone-ble==0.1.0 # homeassistant.components.led_ble -led-ble==1.0.2 +led-ble==1.1.1 # homeassistant.components.lektrico lektricowifi==0.0.43 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8f357072d97..1d1be9738f5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1076,7 +1076,7 @@ ld2410-ble==0.1.1 leaone-ble==0.1.0 # homeassistant.components.led_ble -led-ble==1.0.2 +led-ble==1.1.1 # homeassistant.components.lektrico lektricowifi==0.0.43 From b38a7186d2cbd7aff4f78172f01c21ef713b5a14 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Thu, 12 Dec 2024 02:03:05 -0600 Subject: [PATCH 196/677] Change warning to debug for VAD timeout (#132987) --- homeassistant/components/assist_pipeline/vad.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/assist_pipeline/vad.py 
b/homeassistant/components/assist_pipeline/vad.py index deae5b9b7b3..c7fe1bc10c7 100644 --- a/homeassistant/components/assist_pipeline/vad.py +++ b/homeassistant/components/assist_pipeline/vad.py @@ -140,7 +140,7 @@ class VoiceCommandSegmenter: self._timeout_seconds_left -= chunk_seconds if self._timeout_seconds_left <= 0: - _LOGGER.warning( + _LOGGER.debug( "VAD end of speech detection timed out after %s seconds", self.timeout_seconds, ) From ed03c0a294785bcb57f340e8d5389e81e61693e7 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 12 Dec 2024 13:49:17 +0100 Subject: [PATCH 197/677] Fix LaMetric config flow for cloud import path (#133039) --- homeassistant/components/lametric/config_flow.py | 5 ++++- homeassistant/components/lametric/strings.json | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/lametric/config_flow.py b/homeassistant/components/lametric/config_flow.py index 36dcdf26ed6..05c5dea77d1 100644 --- a/homeassistant/components/lametric/config_flow.py +++ b/homeassistant/components/lametric/config_flow.py @@ -249,7 +249,10 @@ class LaMetricFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): device = await lametric.device() if self.source != SOURCE_REAUTH: - await self.async_set_unique_id(device.serial_number) + await self.async_set_unique_id( + device.serial_number, + raise_on_progress=False, + ) self._abort_if_unique_id_configured( updates={CONF_HOST: lametric.host, CONF_API_KEY: lametric.api_key} ) diff --git a/homeassistant/components/lametric/strings.json b/homeassistant/components/lametric/strings.json index 87bda01e305..0fd6f5a12dc 100644 --- a/homeassistant/components/lametric/strings.json +++ b/homeassistant/components/lametric/strings.json @@ -21,8 +21,11 @@ "api_key": "You can find this API key in [devices page in your LaMetric developer account](https://developer.lametric.com/user/devices)." 
} }, - "user_cloud_select_device": { + "cloud_select_device": { "data": { + "device": "Device" + }, + "data_description": { "device": "Select the LaMetric device to add" } } From 73465a7aa8cb8121a3721b72d87cb18bf2da3bff Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Thu, 12 Dec 2024 19:11:07 +0100 Subject: [PATCH 198/677] Update frontend to 20241127.8 (#133066) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index bfc08c6e11e..1f9988dff38 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241127.7"] + "requirements": ["home-assistant-frontend==20241127.8"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index aef46c0ffc6..5d7df8a2ff5 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -34,7 +34,7 @@ habluetooth==3.6.0 hass-nabucasa==0.86.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241127.7 +home-assistant-frontend==20241127.8 home-assistant-intents==2024.12.9 httpx==0.27.2 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 0e50c50aea4..c862374cb16 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1130,7 +1130,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.7 +home-assistant-frontend==20241127.8 # homeassistant.components.conversation home-assistant-intents==2024.12.9 diff --git a/requirements_test_all.txt 
b/requirements_test_all.txt index 1d1be9738f5..a93cc33b591 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -956,7 +956,7 @@ hole==0.8.0 holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241127.7 +home-assistant-frontend==20241127.8 # homeassistant.components.conversation home-assistant-intents==2024.12.9 From d0c00aaa67636f5f976a01974375f1286b493647 Mon Sep 17 00:00:00 2001 From: jb101010-2 <168106462+jb101010-2@users.noreply.github.com> Date: Fri, 13 Dec 2024 09:01:48 +0100 Subject: [PATCH 199/677] Bump pysuezV2 to 1.3.5 (#133076) --- homeassistant/components/suez_water/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/suez_water/manifest.json b/homeassistant/components/suez_water/manifest.json index 240be0f37bd..7e720a86afd 100644 --- a/homeassistant/components/suez_water/manifest.json +++ b/homeassistant/components/suez_water/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/suez_water", "iot_class": "cloud_polling", "loggers": ["pysuez", "regex"], - "requirements": ["pysuezV2==1.3.2"] + "requirements": ["pysuezV2==1.3.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index c862374cb16..b2aa310c209 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2293,7 +2293,7 @@ pysqueezebox==0.10.0 pystiebeleltron==0.0.1.dev2 # homeassistant.components.suez_water -pysuezV2==1.3.2 +pysuezV2==1.3.5 # homeassistant.components.switchbee pyswitchbee==1.8.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a93cc33b591..c567d839bbd 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1850,7 +1850,7 @@ pyspeex-noise==1.0.2 pysqueezebox==0.10.0 # homeassistant.components.suez_water -pysuezV2==1.3.2 +pysuezV2==1.3.5 # homeassistant.components.switchbee pyswitchbee==1.8.3 From 
01359b32c45efdf74a3cfdfd05bbb0695cc9bc27 Mon Sep 17 00:00:00 2001 From: David Bonnes Date: Fri, 13 Dec 2024 07:54:14 +0000 Subject: [PATCH 200/677] Bugfix to use evohome's new hostname (#133085) --- homeassistant/components/evohome/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/evohome/manifest.json b/homeassistant/components/evohome/manifest.json index da3d197f6aa..22edadad7f4 100644 --- a/homeassistant/components/evohome/manifest.json +++ b/homeassistant/components/evohome/manifest.json @@ -6,5 +6,5 @@ "iot_class": "cloud_polling", "loggers": ["evohomeasync", "evohomeasync2"], "quality_scale": "legacy", - "requirements": ["evohome-async==0.4.20"] + "requirements": ["evohome-async==0.4.21"] } diff --git a/requirements_all.txt b/requirements_all.txt index b2aa310c209..a6fedd05938 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -878,7 +878,7 @@ eufylife-ble-client==0.1.8 # evdev==1.6.1 # homeassistant.components.evohome -evohome-async==0.4.20 +evohome-async==0.4.21 # homeassistant.components.bryant_evolution evolutionhttp==0.0.18 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c567d839bbd..af98c752059 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -744,7 +744,7 @@ eternalegypt==0.0.16 eufylife-ble-client==0.1.8 # homeassistant.components.evohome -evohome-async==0.4.20 +evohome-async==0.4.21 # homeassistant.components.bryant_evolution evolutionhttp==0.0.18 From d9bb1f603562d3990bd5c704860992747aa3a0de Mon Sep 17 00:00:00 2001 From: Brandon Rothweiler <2292715+bdr99@users.noreply.github.com> Date: Fri, 13 Dec 2024 02:46:15 -0500 Subject: [PATCH 201/677] Bump py-aosmith to 1.0.12 (#133100) --- homeassistant/components/aosmith/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git 
a/homeassistant/components/aosmith/manifest.json b/homeassistant/components/aosmith/manifest.json index eae7981d5b9..a928a6677cb 100644 --- a/homeassistant/components/aosmith/manifest.json +++ b/homeassistant/components/aosmith/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/aosmith", "iot_class": "cloud_polling", - "requirements": ["py-aosmith==1.0.11"] + "requirements": ["py-aosmith==1.0.12"] } diff --git a/requirements_all.txt b/requirements_all.txt index a6fedd05938..162c2c97079 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1672,7 +1672,7 @@ pushover_complete==1.1.1 pvo==2.1.1 # homeassistant.components.aosmith -py-aosmith==1.0.11 +py-aosmith==1.0.12 # homeassistant.components.canary py-canary==0.5.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index af98c752059..b7b33a8419e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1367,7 +1367,7 @@ pushover_complete==1.1.1 pvo==2.1.1 # homeassistant.components.aosmith -py-aosmith==1.0.11 +py-aosmith==1.0.12 # homeassistant.components.canary py-canary==0.5.4 From f9bdc295468fc19bd527e37a86be2bd59fdeaee0 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Fri, 13 Dec 2024 09:45:38 +0100 Subject: [PATCH 202/677] Bump deebot-client to 9.4.0 (#133114) --- homeassistant/components/ecovacs/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index b9315e0c1c6..271f9ee8dcd 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==9.3.0"] + 
"requirements": ["py-sucks==0.9.10", "deebot-client==9.4.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 162c2c97079..765e5f74bfd 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -738,7 +738,7 @@ debugpy==1.8.8 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==9.3.0 +deebot-client==9.4.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b7b33a8419e..e744f5397ea 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -628,7 +628,7 @@ dbus-fast==2.24.3 debugpy==1.8.8 # homeassistant.components.ecovacs -deebot-client==9.3.0 +deebot-client==9.4.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns From 9a7fda5b255fc94d3fd2253e410ad8a0cf3f1ac3 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 13 Dec 2024 10:22:46 +0100 Subject: [PATCH 203/677] Bump aiowithings to 3.1.4 (#133117) --- homeassistant/components/withings/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/withings/manifest.json b/homeassistant/components/withings/manifest.json index 57d4bafdc7b..886eb66f5e0 100644 --- a/homeassistant/components/withings/manifest.json +++ b/homeassistant/components/withings/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/withings", "iot_class": "cloud_push", "loggers": ["aiowithings"], - "requirements": ["aiowithings==3.1.3"] + "requirements": ["aiowithings==3.1.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index 765e5f74bfd..38239d22af2 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -420,7 +420,7 @@ aiowatttime==0.1.1 aiowebostv==0.4.2 # homeassistant.components.withings -aiowithings==3.1.3 +aiowithings==3.1.4 # homeassistant.components.yandex_transport aioymaps==1.2.5 diff --git 
a/requirements_test_all.txt b/requirements_test_all.txt index e744f5397ea..1c76684a4a1 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -402,7 +402,7 @@ aiowatttime==0.1.1 aiowebostv==0.4.2 # homeassistant.components.withings -aiowithings==3.1.3 +aiowithings==3.1.4 # homeassistant.components.yandex_transport aioymaps==1.2.5 From 9b83a0028514ea62537e20296ffcc0b6a5332337 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 13 Dec 2024 11:04:47 +0100 Subject: [PATCH 204/677] Bump version to 2024.12.3 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 412b4b2eb19..391a02d07b4 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -25,7 +25,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 12 -PATCH_VERSION: Final = "2" +PATCH_VERSION: Final = "3" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 56347fbd31b..ef8ce79f894 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.12.2" +version = "2024.12.3" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From c0f6535d1105b7cbf00970ce0dade7cbcf597ab3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ludovic=20BOU=C3=89?= Date: Fri, 13 Dec 2024 11:11:47 +0100 Subject: [PATCH 205/677] Fix typo in `WaterHeaterEntityDescription` name (#132888) --- homeassistant/components/water_heater/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/water_heater/__init__.py b/homeassistant/components/water_heater/__init__.py index 67ce3a97fd1..cac0a365f74 100644 --- a/homeassistant/components/water_heater/__init__.py +++ b/homeassistant/components/water_heater/__init__.py @@ -129,7 +129,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return await hass.data[DATA_COMPONENT].async_unload_entry(entry) -class WaterHeaterEntityEntityDescription(EntityDescription, frozen_or_thawed=True): +class WaterHeaterEntityDescription(EntityDescription, frozen_or_thawed=True): """A class that describes water heater entities.""" @@ -152,7 +152,7 @@ class WaterHeaterEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): {ATTR_OPERATION_LIST, ATTR_MIN_TEMP, ATTR_MAX_TEMP} ) - entity_description: WaterHeaterEntityEntityDescription + entity_description: WaterHeaterEntityDescription _attr_current_operation: str | None = None _attr_current_temperature: float | None = None _attr_is_away_mode_on: bool | None = None From 7e2d3eb482f39ad9827bbb1d3d5763ec16f5309a Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Fri, 13 Dec 2024 11:59:55 +0100 Subject: [PATCH 206/677] Add contact vip info to fritzbox_callmonitor sensor (#132913) --- .../components/fritzbox_callmonitor/base.py | 44 ++++++++++++++----- .../components/fritzbox_callmonitor/sensor.py | 27 +++++++----- .../fritzbox_callmonitor/strings.json | 3 +- 3 files changed, 52 insertions(+), 22 deletions(-) diff --git a/homeassistant/components/fritzbox_callmonitor/base.py 
b/homeassistant/components/fritzbox_callmonitor/base.py index 2816880a1b2..3c8714624e7 100644 --- a/homeassistant/components/fritzbox_callmonitor/base.py +++ b/homeassistant/components/fritzbox_callmonitor/base.py @@ -3,6 +3,7 @@ from __future__ import annotations from contextlib import suppress +from dataclasses import dataclass from datetime import timedelta import logging import re @@ -19,12 +20,33 @@ _LOGGER = logging.getLogger(__name__) MIN_TIME_PHONEBOOK_UPDATE = timedelta(hours=6) +@dataclass +class Contact: + """Store details for one phonebook contact.""" + + name: str + numbers: list[str] + vip: bool + + def __init__( + self, name: str, numbers: list[str] | None = None, category: str | None = None + ) -> None: + """Initialize the class.""" + self.name = name + self.numbers = [re.sub(REGEX_NUMBER, "", nr) for nr in numbers or ()] + self.vip = category == "1" + + +unknown_contact = Contact(UNKNOWN_NAME) + + class FritzBoxPhonebook: """Connects to a FritzBox router and downloads its phone book.""" fph: FritzPhonebook phonebook_dict: dict[str, list[str]] - number_dict: dict[str, str] + contacts: list[Contact] + number_dict: dict[str, Contact] def __init__( self, @@ -56,27 +78,27 @@ class FritzBoxPhonebook: if self.phonebook_id is None: return - self.phonebook_dict = self.fph.get_all_names(self.phonebook_id) - self.number_dict = { - re.sub(REGEX_NUMBER, "", nr): name - for name, nrs in self.phonebook_dict.items() - for nr in nrs - } + self.fph.get_all_name_numbers(self.phonebook_id) + self.contacts = [ + Contact(c.name, c.numbers, getattr(c, "category", None)) + for c in self.fph.phonebook.contacts + ] + self.number_dict = {nr: c for c in self.contacts for nr in c.numbers} _LOGGER.debug("Fritz!Box phone book successfully updated") def get_phonebook_ids(self) -> list[int]: """Return list of phonebook ids.""" return self.fph.phonebook_ids # type: ignore[no-any-return] - def get_name(self, number: str) -> str: - """Return a name for a given phone number.""" + def 
get_contact(self, number: str) -> Contact: + """Return a contact for a given phone number.""" number = re.sub(REGEX_NUMBER, "", str(number)) with suppress(KeyError): return self.number_dict[number] if not self.prefixes: - return UNKNOWN_NAME + return unknown_contact for prefix in self.prefixes: with suppress(KeyError): @@ -84,4 +106,4 @@ class FritzBoxPhonebook: with suppress(KeyError): return self.number_dict[prefix + number.lstrip("0")] - return UNKNOWN_NAME + return unknown_contact diff --git a/homeassistant/components/fritzbox_callmonitor/sensor.py b/homeassistant/components/fritzbox_callmonitor/sensor.py index 668369c35a7..df18ae5702a 100644 --- a/homeassistant/components/fritzbox_callmonitor/sensor.py +++ b/homeassistant/components/fritzbox_callmonitor/sensor.py @@ -20,7 +20,7 @@ from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import FritzBoxCallMonitorConfigEntry -from .base import FritzBoxPhonebook +from .base import Contact, FritzBoxPhonebook from .const import ( ATTR_PREFIXES, CONF_PHONEBOOK, @@ -96,7 +96,7 @@ class FritzBoxCallSensor(SensorEntity): self._host = host self._port = port self._monitor: FritzBoxCallMonitor | None = None - self._attributes: dict[str, str | list[str]] = {} + self._attributes: dict[str, str | list[str] | bool] = {} self._attr_translation_placeholders = {"phonebook_name": phonebook_name} self._attr_unique_id = unique_id @@ -152,20 +152,20 @@ class FritzBoxCallSensor(SensorEntity): """Set the state.""" self._attr_native_value = state - def set_attributes(self, attributes: Mapping[str, str]) -> None: + def set_attributes(self, attributes: Mapping[str, str | bool]) -> None: """Set the state attributes.""" self._attributes = {**attributes} @property - def extra_state_attributes(self) -> dict[str, str | list[str]]: + def extra_state_attributes(self) -> dict[str, str | list[str] | bool]: """Return the state attributes.""" if self._prefixes: 
self._attributes[ATTR_PREFIXES] = self._prefixes return self._attributes - def number_to_name(self, number: str) -> str: - """Return a name for a given phone number.""" - return self._fritzbox_phonebook.get_name(number) + def number_to_contact(self, number: str) -> Contact: + """Return a contact for a given phone number.""" + return self._fritzbox_phonebook.get_contact(number) def update(self) -> None: """Update the phonebook if it is defined.""" @@ -225,35 +225,42 @@ class FritzBoxCallMonitor: df_in = "%d.%m.%y %H:%M:%S" df_out = "%Y-%m-%dT%H:%M:%S" isotime = datetime.strptime(line[0], df_in).strftime(df_out) + att: dict[str, str | bool] if line[1] == FritzState.RING: self._sensor.set_state(CallState.RINGING) + contact = self._sensor.number_to_contact(line[3]) att = { "type": "incoming", "from": line[3], "to": line[4], "device": line[5], "initiated": isotime, - "from_name": self._sensor.number_to_name(line[3]), + "from_name": contact.name, + "vip": contact.vip, } self._sensor.set_attributes(att) elif line[1] == FritzState.CALL: self._sensor.set_state(CallState.DIALING) + contact = self._sensor.number_to_contact(line[5]) att = { "type": "outgoing", "from": line[4], "to": line[5], "device": line[6], "initiated": isotime, - "to_name": self._sensor.number_to_name(line[5]), + "to_name": contact.name, + "vip": contact.vip, } self._sensor.set_attributes(att) elif line[1] == FritzState.CONNECT: self._sensor.set_state(CallState.TALKING) + contact = self._sensor.number_to_contact(line[4]) att = { "with": line[4], "device": line[3], "accepted": isotime, - "with_name": self._sensor.number_to_name(line[4]), + "with_name": contact.name, + "vip": contact.vip, } self._sensor.set_attributes(att) elif line[1] == FritzState.DISCONNECT: diff --git a/homeassistant/components/fritzbox_callmonitor/strings.json b/homeassistant/components/fritzbox_callmonitor/strings.json index e935549035c..437b218a8e2 100644 --- a/homeassistant/components/fritzbox_callmonitor/strings.json +++ 
b/homeassistant/components/fritzbox_callmonitor/strings.json @@ -78,7 +78,8 @@ "accepted": { "name": "Accepted" }, "with_name": { "name": "With name" }, "duration": { "name": "Duration" }, - "closed": { "name": "Closed" } + "closed": { "name": "Closed" }, + "vip": { "name": "Important" } } } } From 81c8d7153b7277c3ddd28af6a0870d854025b83e Mon Sep 17 00:00:00 2001 From: Martijn Russchen Date: Fri, 13 Dec 2024 12:50:50 +0100 Subject: [PATCH 207/677] Push Nibe package to 2.14.0 (#133125) --- homeassistant/components/nibe_heatpump/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/nibe_heatpump/manifest.json b/homeassistant/components/nibe_heatpump/manifest.json index 407cdfcfd57..049ba905f04 100644 --- a/homeassistant/components/nibe_heatpump/manifest.json +++ b/homeassistant/components/nibe_heatpump/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/nibe_heatpump", "iot_class": "local_polling", - "requirements": ["nibe==2.13.0"] + "requirements": ["nibe==2.14.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 66dfa359577..3c2df95f57f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1462,7 +1462,7 @@ nextcord==2.6.0 nextdns==4.0.0 # homeassistant.components.nibe_heatpump -nibe==2.13.0 +nibe==2.14.0 # homeassistant.components.nice_go nice-go==0.3.10 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5e0705b7358..53be7b9893c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1225,7 +1225,7 @@ nextcord==2.6.0 nextdns==4.0.0 # homeassistant.components.nibe_heatpump -nibe==2.13.0 +nibe==2.14.0 # homeassistant.components.nice_go nice-go==0.3.10 From d65807324627b15fbbf6fd4553ab9eac67a5cd47 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Fri, 13 Dec 2024 13:01:55 +0100 Subject: [PATCH 208/677] Make Twitch sensor 
state and attributes translatable (#133127) --- homeassistant/components/twitch/sensor.py | 6 ++- homeassistant/components/twitch/strings.json | 42 ++++++++++++++++++++ 2 files changed, 46 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/twitch/sensor.py b/homeassistant/components/twitch/sensor.py index bd5fc509989..f78d33ea461 100644 --- a/homeassistant/components/twitch/sensor.py +++ b/homeassistant/components/twitch/sensor.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any -from homeassistant.components.sensor import SensorEntity +from homeassistant.components.sensor import SensorDeviceClass, SensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -49,6 +49,8 @@ class TwitchSensor(CoordinatorEntity[TwitchCoordinator], SensorEntity): """Representation of a Twitch channel.""" _attr_translation_key = "channel" + _attr_device_class = SensorDeviceClass.ENUM + _attr_options = [STATE_OFFLINE, STATE_STREAMING] def __init__(self, coordinator: TwitchCoordinator, channel_id: str) -> None: """Initialize the sensor.""" @@ -82,8 +84,8 @@ class TwitchSensor(CoordinatorEntity[TwitchCoordinator], SensorEntity): ATTR_TITLE: channel.title, ATTR_STARTED_AT: channel.started_at, ATTR_VIEWERS: channel.viewers, + ATTR_SUBSCRIPTION: False, } - resp[ATTR_SUBSCRIPTION] = False if channel.subscribed is not None: resp[ATTR_SUBSCRIPTION] = channel.subscribed resp[ATTR_SUBSCRIPTION_GIFTED] = channel.subscription_gifted diff --git a/homeassistant/components/twitch/strings.json b/homeassistant/components/twitch/strings.json index bbe46526c36..7271b81e924 100644 --- a/homeassistant/components/twitch/strings.json +++ b/homeassistant/components/twitch/strings.json @@ -16,5 +16,47 @@ "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", "oauth_failed": 
"[%key:common::config_flow::abort::oauth2_failed%]" } + }, + "entity": { + "sensor": { + "channel": { + "state": { + "streaming": "Streaming", + "offline": "Offline" + }, + "state_attributes": { + "followers": { + "name": "Followers" + }, + "game": { + "name": "Game" + }, + "title": { + "name": "Title" + }, + "started_at": { + "name": "Started at" + }, + "viewers": { + "name": "Viewers" + }, + "subscribed": { + "name": "Subscribed" + }, + "subscription_is_gifted": { + "name": "Subscription is gifted" + }, + "subscription_tier": { + "name": "Subscription tier" + }, + "following": { + "name": "Following" + }, + "following_since": { + "name": "Following since" + } + } + } + } } } From 684667e8e733136ada08de57a975ec938a44114b Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 13 Dec 2024 13:24:46 +0100 Subject: [PATCH 209/677] Update open-meteo to v0.3.2 (#133122) --- homeassistant/components/open_meteo/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/open_meteo/manifest.json b/homeassistant/components/open_meteo/manifest.json index abdb59a48d0..a2f2a724ad5 100644 --- a/homeassistant/components/open_meteo/manifest.json +++ b/homeassistant/components/open_meteo/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/open_meteo", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["open-meteo==0.3.1"] + "requirements": ["open-meteo==0.3.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 3c2df95f57f..3bb1faea169 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1535,7 +1535,7 @@ onvif-zeep-async==3.1.13 open-garage==0.2.0 # homeassistant.components.open_meteo -open-meteo==0.3.1 +open-meteo==0.3.2 # homeassistant.components.openai_conversation openai==1.35.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 53be7b9893c..a4f146fbc56 
100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1283,7 +1283,7 @@ onvif-zeep-async==3.1.13 open-garage==0.2.0 # homeassistant.components.open_meteo -open-meteo==0.3.1 +open-meteo==0.3.2 # homeassistant.components.openai_conversation openai==1.35.7 From f816a0667cfb3761d00696a41525a146033f137e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 13:28:11 +0100 Subject: [PATCH 210/677] Reduce functools.partial with ServiceCall.hass in energyzero (#133134) --- homeassistant/components/energyzero/services.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/energyzero/services.py b/homeassistant/components/energyzero/services.py index 286735895ad..c47958b670f 100644 --- a/homeassistant/components/energyzero/services.py +++ b/homeassistant/components/energyzero/services.py @@ -83,12 +83,12 @@ def __serialize_prices(prices: Electricity | Gas) -> ServiceResponse: } -def __get_coordinator( - hass: HomeAssistant, call: ServiceCall -) -> EnergyZeroDataUpdateCoordinator: +def __get_coordinator(call: ServiceCall) -> EnergyZeroDataUpdateCoordinator: """Get the coordinator from the entry.""" entry_id: str = call.data[ATTR_CONFIG_ENTRY] - entry: EnergyZeroConfigEntry | None = hass.config_entries.async_get_entry(entry_id) + entry: EnergyZeroConfigEntry | None = call.hass.config_entries.async_get_entry( + entry_id + ) if not entry: raise ServiceValidationError( @@ -113,10 +113,9 @@ def __get_coordinator( async def __get_prices( call: ServiceCall, *, - hass: HomeAssistant, price_type: PriceType, ) -> ServiceResponse: - coordinator = __get_coordinator(hass, call) + coordinator = __get_coordinator(call) start = __get_date(call.data.get(ATTR_START)) end = __get_date(call.data.get(ATTR_END)) @@ -151,14 +150,14 @@ def async_setup_services(hass: HomeAssistant) -> None: hass.services.async_register( DOMAIN, GAS_SERVICE_NAME, - partial(__get_prices, 
hass=hass, price_type=PriceType.GAS), + partial(__get_prices, price_type=PriceType.GAS), schema=SERVICE_SCHEMA, supports_response=SupportsResponse.ONLY, ) hass.services.async_register( DOMAIN, ENERGY_SERVICE_NAME, - partial(__get_prices, hass=hass, price_type=PriceType.ENERGY), + partial(__get_prices, price_type=PriceType.ENERGY), schema=SERVICE_SCHEMA, supports_response=SupportsResponse.ONLY, ) From c7adc984086963a23f8d7f65ed4402da19b75d6f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 13:28:54 +0100 Subject: [PATCH 211/677] Replace functools.partial with ServiceCall.hass in unifiprotect (#133131) --- .../components/unifiprotect/services.py | 93 +++++++++---------- 1 file changed, 45 insertions(+), 48 deletions(-) diff --git a/homeassistant/components/unifiprotect/services.py b/homeassistant/components/unifiprotect/services.py index 9c045164d6d..fc438240839 100644 --- a/homeassistant/components/unifiprotect/services.py +++ b/homeassistant/components/unifiprotect/services.py @@ -3,7 +3,6 @@ from __future__ import annotations import asyncio -import functools from typing import Any, cast from pydantic.v1 import ValidationError @@ -88,9 +87,9 @@ def _async_get_ufp_instance(hass: HomeAssistant, device_id: str) -> ProtectApiCl @callback -def _async_get_ufp_camera(hass: HomeAssistant, call: ServiceCall) -> Camera: - ref = async_extract_referenced_entity_ids(hass, call) - entity_registry = er.async_get(hass) +def _async_get_ufp_camera(call: ServiceCall) -> Camera: + ref = async_extract_referenced_entity_ids(call.hass, call) + entity_registry = er.async_get(call.hass) entity_id = ref.indirectly_referenced.pop() camera_entity = entity_registry.async_get(entity_id) @@ -98,30 +97,27 @@ def _async_get_ufp_camera(hass: HomeAssistant, call: ServiceCall) -> Camera: assert camera_entity.device_id is not None camera_mac = _async_unique_id_to_mac(camera_entity.unique_id) - instance = _async_get_ufp_instance(hass, 
camera_entity.device_id) + instance = _async_get_ufp_instance(call.hass, camera_entity.device_id) return cast(Camera, instance.bootstrap.get_device_from_mac(camera_mac)) @callback -def _async_get_protect_from_call( - hass: HomeAssistant, call: ServiceCall -) -> set[ProtectApiClient]: +def _async_get_protect_from_call(call: ServiceCall) -> set[ProtectApiClient]: return { - _async_get_ufp_instance(hass, device_id) + _async_get_ufp_instance(call.hass, device_id) for device_id in async_extract_referenced_entity_ids( - hass, call + call.hass, call ).referenced_devices } async def _async_service_call_nvr( - hass: HomeAssistant, call: ServiceCall, method: str, *args: Any, **kwargs: Any, ) -> None: - instances = _async_get_protect_from_call(hass, call) + instances = _async_get_protect_from_call(call) try: await asyncio.gather( *(getattr(i.bootstrap.nvr, method)(*args, **kwargs) for i in instances) @@ -130,23 +126,23 @@ async def _async_service_call_nvr( raise HomeAssistantError(str(err)) from err -async def add_doorbell_text(hass: HomeAssistant, call: ServiceCall) -> None: +async def add_doorbell_text(call: ServiceCall) -> None: """Add a custom doorbell text message.""" message: str = call.data[ATTR_MESSAGE] - await _async_service_call_nvr(hass, call, "add_custom_doorbell_message", message) + await _async_service_call_nvr(call, "add_custom_doorbell_message", message) -async def remove_doorbell_text(hass: HomeAssistant, call: ServiceCall) -> None: +async def remove_doorbell_text(call: ServiceCall) -> None: """Remove a custom doorbell text message.""" message: str = call.data[ATTR_MESSAGE] - await _async_service_call_nvr(hass, call, "remove_custom_doorbell_message", message) + await _async_service_call_nvr(call, "remove_custom_doorbell_message", message) -async def remove_privacy_zone(hass: HomeAssistant, call: ServiceCall) -> None: +async def remove_privacy_zone(call: ServiceCall) -> None: """Remove privacy zone from camera.""" name: str = call.data[ATTR_NAME] - camera = 
_async_get_ufp_camera(hass, call) + camera = _async_get_ufp_camera(call) remove_index: int | None = None for index, zone in enumerate(camera.privacy_zones): @@ -171,10 +167,10 @@ def _async_unique_id_to_mac(unique_id: str) -> str: return unique_id.split("_")[0] -async def set_chime_paired_doorbells(hass: HomeAssistant, call: ServiceCall) -> None: +async def set_chime_paired_doorbells(call: ServiceCall) -> None: """Set paired doorbells on chime.""" - ref = async_extract_referenced_entity_ids(hass, call) - entity_registry = er.async_get(hass) + ref = async_extract_referenced_entity_ids(call.hass, call) + entity_registry = er.async_get(call.hass) entity_id = ref.indirectly_referenced.pop() chime_button = entity_registry.async_get(entity_id) @@ -182,13 +178,13 @@ async def set_chime_paired_doorbells(hass: HomeAssistant, call: ServiceCall) -> assert chime_button.device_id is not None chime_mac = _async_unique_id_to_mac(chime_button.unique_id) - instance = _async_get_ufp_instance(hass, chime_button.device_id) + instance = _async_get_ufp_instance(call.hass, chime_button.device_id) chime = instance.bootstrap.get_device_from_mac(chime_mac) chime = cast(Chime, chime) assert chime is not None call.data = ReadOnlyDict(call.data.get("doorbells") or {}) - doorbell_refs = async_extract_referenced_entity_ids(hass, call) + doorbell_refs = async_extract_referenced_entity_ids(call.hass, call) doorbell_ids: set[str] = set() for camera_id in doorbell_refs.referenced | doorbell_refs.indirectly_referenced: doorbell_sensor = entity_registry.async_get(camera_id) @@ -209,31 +205,32 @@ async def set_chime_paired_doorbells(hass: HomeAssistant, call: ServiceCall) -> await chime.save_device(data_before_changed) +SERVICES = [ + ( + SERVICE_ADD_DOORBELL_TEXT, + add_doorbell_text, + DOORBELL_TEXT_SCHEMA, + ), + ( + SERVICE_REMOVE_DOORBELL_TEXT, + remove_doorbell_text, + DOORBELL_TEXT_SCHEMA, + ), + ( + SERVICE_SET_CHIME_PAIRED, + set_chime_paired_doorbells, + CHIME_PAIRED_SCHEMA, + ), + ( + 
SERVICE_REMOVE_PRIVACY_ZONE, + remove_privacy_zone, + REMOVE_PRIVACY_ZONE_SCHEMA, + ), +] + + def async_setup_services(hass: HomeAssistant) -> None: """Set up the global UniFi Protect services.""" - services = [ - ( - SERVICE_ADD_DOORBELL_TEXT, - functools.partial(add_doorbell_text, hass), - DOORBELL_TEXT_SCHEMA, - ), - ( - SERVICE_REMOVE_DOORBELL_TEXT, - functools.partial(remove_doorbell_text, hass), - DOORBELL_TEXT_SCHEMA, - ), - ( - SERVICE_SET_CHIME_PAIRED, - functools.partial(set_chime_paired_doorbells, hass), - CHIME_PAIRED_SCHEMA, - ), - ( - SERVICE_REMOVE_PRIVACY_ZONE, - functools.partial(remove_privacy_zone, hass), - REMOVE_PRIVACY_ZONE_SCHEMA, - ), - ] - for name, method, schema in services: - if hass.services.has_service(DOMAIN, name): - continue + + for name, method, schema in SERVICES: hass.services.async_register(DOMAIN, name, method, schema=schema) From 4a5e47d2f03089afe19edde020678d9e1da04bef Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 13:29:42 +0100 Subject: [PATCH 212/677] Replace functools.partial with ServiceCall.hass in tibber (#133132) --- homeassistant/components/tibber/services.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/tibber/services.py b/homeassistant/components/tibber/services.py index 5033cda11d0..938e96b9917 100644 --- a/homeassistant/components/tibber/services.py +++ b/homeassistant/components/tibber/services.py @@ -4,7 +4,6 @@ from __future__ import annotations import datetime as dt from datetime import datetime -from functools import partial from typing import Any, Final import voluptuous as vol @@ -33,8 +32,8 @@ SERVICE_SCHEMA: Final = vol.Schema( ) -async def __get_prices(call: ServiceCall, *, hass: HomeAssistant) -> ServiceResponse: - tibber_connection = hass.data[DOMAIN] +async def __get_prices(call: ServiceCall) -> ServiceResponse: + tibber_connection = call.hass.data[DOMAIN] start = 
__get_date(call.data.get(ATTR_START), "start") end = __get_date(call.data.get(ATTR_END), "end") @@ -94,7 +93,7 @@ def async_setup_services(hass: HomeAssistant) -> None: hass.services.async_register( DOMAIN, PRICE_SERVICE_NAME, - partial(__get_prices, hass=hass), + __get_prices, schema=SERVICE_SCHEMA, supports_response=SupportsResponse.ONLY, ) From a131497e1f9a6c9c49989b245f21ccb57e95b2bd Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 13:30:05 +0100 Subject: [PATCH 213/677] Reduce functools.partial with ServiceCall.hass in easyenergy (#133133) --- homeassistant/components/easyenergy/services.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/easyenergy/services.py b/homeassistant/components/easyenergy/services.py index cb5424496ac..f5ee89d5325 100644 --- a/homeassistant/components/easyenergy/services.py +++ b/homeassistant/components/easyenergy/services.py @@ -86,12 +86,12 @@ def __serialize_prices(prices: list[dict[str, float | datetime]]) -> ServiceResp } -def __get_coordinator( - hass: HomeAssistant, call: ServiceCall -) -> EasyEnergyDataUpdateCoordinator: +def __get_coordinator(call: ServiceCall) -> EasyEnergyDataUpdateCoordinator: """Get the coordinator from the entry.""" entry_id: str = call.data[ATTR_CONFIG_ENTRY] - entry: EasyEnergyConfigEntry | None = hass.config_entries.async_get_entry(entry_id) + entry: EasyEnergyConfigEntry | None = call.hass.config_entries.async_get_entry( + entry_id + ) if not entry: raise ServiceValidationError( @@ -116,11 +116,10 @@ def __get_coordinator( async def __get_prices( call: ServiceCall, *, - hass: HomeAssistant, price_type: PriceType, ) -> ServiceResponse: """Get prices from easyEnergy.""" - coordinator = __get_coordinator(hass, call) + coordinator = __get_coordinator(call) start = __get_date(call.data.get(ATTR_START)) end = __get_date(call.data.get(ATTR_END)) @@ -156,21 +155,21 @@ def 
async_setup_services(hass: HomeAssistant) -> None: hass.services.async_register( DOMAIN, GAS_SERVICE_NAME, - partial(__get_prices, hass=hass, price_type=PriceType.GAS), + partial(__get_prices, price_type=PriceType.GAS), schema=SERVICE_SCHEMA, supports_response=SupportsResponse.ONLY, ) hass.services.async_register( DOMAIN, ENERGY_USAGE_SERVICE_NAME, - partial(__get_prices, hass=hass, price_type=PriceType.ENERGY_USAGE), + partial(__get_prices, price_type=PriceType.ENERGY_USAGE), schema=SERVICE_SCHEMA, supports_response=SupportsResponse.ONLY, ) hass.services.async_register( DOMAIN, ENERGY_RETURN_SERVICE_NAME, - partial(__get_prices, hass=hass, price_type=PriceType.ENERGY_RETURN), + partial(__get_prices, price_type=PriceType.ENERGY_RETURN), schema=SERVICE_SCHEMA, supports_response=SupportsResponse.ONLY, ) From b4e065d33191930917be5ca1cf44737a3cf8c19d Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 13 Dec 2024 13:30:22 +0100 Subject: [PATCH 214/677] Bump yt-dlp to 2024.12.13 (#133129) --- homeassistant/components/media_extractor/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index 195dc678bc2..21c07607573 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -8,6 +8,6 @@ "iot_class": "calculated", "loggers": ["yt_dlp"], "quality_scale": "internal", - "requirements": ["yt-dlp[default]==2024.12.06"], + "requirements": ["yt-dlp[default]==2024.12.13"], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index 3bb1faea169..5adb0fb74de 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3074,7 +3074,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.12.06 
+yt-dlp[default]==2024.12.13 # homeassistant.components.zamg zamg==0.3.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a4f146fbc56..8e5cdf569b3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2466,7 +2466,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.12.06 +yt-dlp[default]==2024.12.13 # homeassistant.components.zamg zamg==0.3.6 From fe46fd24bd77465e1f20acdbd7991c85375a4226 Mon Sep 17 00:00:00 2001 From: Cyrill Raccaud Date: Fri, 13 Dec 2024 13:34:17 +0100 Subject: [PATCH 215/677] Improve data description and title for Cookidoo integration (#133106) * fix data description typo for cookidoo * use placeholder for cookidoo as it is non-translatable * set title of language step * fix for reauth * fix reauth --- homeassistant/components/cookidoo/config_flow.py | 3 +++ homeassistant/components/cookidoo/strings.json | 16 ++++++++-------- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/cookidoo/config_flow.py b/homeassistant/components/cookidoo/config_flow.py index d523de96b01..58e99a70907 100644 --- a/homeassistant/components/cookidoo/config_flow.py +++ b/homeassistant/components/cookidoo/config_flow.py @@ -79,6 +79,7 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN): ), suggested_values=user_input, ), + description_placeholders={"cookidoo": "Cookidoo"}, errors=errors, ) @@ -99,6 +100,7 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="language", data_schema=vol.Schema(self.LANGUAGE_DATA_SCHEMA), + description_placeholders={"cookidoo": "Cookidoo"}, errors=errors, ) @@ -133,6 +135,7 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN): data_schema=vol.Schema(AUTH_DATA_SCHEMA), suggested_values={CONF_EMAIL: reauth_entry.data[CONF_EMAIL]}, ), + description_placeholders={"cookidoo": "Cookidoo"}, errors=errors, ) diff --git a/homeassistant/components/cookidoo/strings.json 
b/homeassistant/components/cookidoo/strings.json index 126205fcf2f..19f709ddaf8 100644 --- a/homeassistant/components/cookidoo/strings.json +++ b/homeassistant/components/cookidoo/strings.json @@ -2,30 +2,30 @@ "config": { "step": { "user": { - "title": "Login to Cookidoo", + "title": "Login to {cookidoo}", "data": { "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]", "country": "Country" }, "data_description": { - "email": "Email used access your Cookidoo account.", - "password": "Password used access your Cookidoo account.", - "country": "Pick your language for the Cookidoo content." + "email": "Email used to access your {cookidoo} account.", + "password": "Password used to access your {cookidoo} account.", + "country": "Pick your language for the {cookidoo} content." } }, "language": { - "title": "Login to Cookidoo", + "title": "Set language for {cookidoo}", "data": { "language": "[%key:common::config_flow::data::language%]" }, "data_description": { - "language": "Pick your language for the Cookidoo content." + "language": "Pick your language for the {cookidoo} content." 
} }, "reauth_confirm": { - "title": "Login again to Cookidoo", - "description": "Please log in to Cookidoo again to continue using this integration.", + "title": "Login again to {cookidoo}", + "description": "Please log in to {cookidoo} again to continue using this integration.", "data": { "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" From 5d8e99731954e95a5b23054e87a95c0af6e0e0eb Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Fri, 13 Dec 2024 13:49:00 +0100 Subject: [PATCH 216/677] Bump velbusaio to 2024.12.2 (#133130) * Bump velbusaio to 2024.12.2 * mistakely pushed this file --- homeassistant/components/velbus/__init__.py | 4 +++- homeassistant/components/velbus/config_flow.py | 2 +- homeassistant/components/velbus/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 5 files changed, 7 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/velbus/__init__.py b/homeassistant/components/velbus/__init__.py index f8426bc4130..6afcc20cc0f 100644 --- a/homeassistant/components/velbus/__init__.py +++ b/homeassistant/components/velbus/__init__.py @@ -52,7 +52,7 @@ async def velbus_connect_task( ) -> None: """Task to offload the long running connect.""" try: - await controller.connect() + await controller.start() except ConnectionError as ex: raise PlatformNotReady( f"Connection error while connecting to Velbus {entry_id}: {ex}" @@ -85,6 +85,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: VelbusConfigEntry) -> bo entry.data[CONF_PORT], cache_dir=hass.config.path(STORAGE_DIR, f"velbuscache-{entry.entry_id}"), ) + await controller.connect() + task = hass.async_create_task(velbus_connect_task(controller, hass, entry.entry_id)) entry.runtime_data = VelbusData(controller=controller, connect_task=task) diff --git a/homeassistant/components/velbus/config_flow.py b/homeassistant/components/velbus/config_flow.py index 0b47dfe6498..26e2fafabbc 100644 --- 
a/homeassistant/components/velbus/config_flow.py +++ b/homeassistant/components/velbus/config_flow.py @@ -35,7 +35,7 @@ class VelbusConfigFlow(ConfigFlow, domain=DOMAIN): """Try to connect to the velbus with the port specified.""" try: controller = velbusaio.controller.Velbus(prt) - await controller.connect(True) + await controller.connect() await controller.stop() except VelbusConnectionFailed: self._errors[CONF_PORT] = "cannot_connect" diff --git a/homeassistant/components/velbus/manifest.json b/homeassistant/components/velbus/manifest.json index 600370f87d9..90981c426f9 100644 --- a/homeassistant/components/velbus/manifest.json +++ b/homeassistant/components/velbus/manifest.json @@ -13,7 +13,7 @@ "velbus-packet", "velbus-protocol" ], - "requirements": ["velbus-aio==2024.12.1"], + "requirements": ["velbus-aio==2024.12.2"], "usb": [ { "vid": "10CF", diff --git a/requirements_all.txt b/requirements_all.txt index 5adb0fb74de..219094c0a28 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2943,7 +2943,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.12.1 +velbus-aio==2024.12.2 # homeassistant.components.venstar venstarcolortouch==0.19 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8e5cdf569b3..46a7d4b29b0 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2356,7 +2356,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.12.1 +velbus-aio==2024.12.2 # homeassistant.components.venstar venstarcolortouch==0.19 From 579ac01eb1b1dd4caac84e0e5b791f5cfee2fdec Mon Sep 17 00:00:00 2001 From: Guido Schmitz Date: Fri, 13 Dec 2024 15:26:02 +0100 Subject: [PATCH 217/677] Fix typos in devolo Home Network tests (#133139) --- tests/components/devolo_home_network/test_config_flow.py | 2 +- tests/components/devolo_home_network/test_update.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/tests/components/devolo_home_network/test_config_flow.py b/tests/components/devolo_home_network/test_config_flow.py index 5234d0f073e..28e9059d588 100644 --- a/tests/components/devolo_home_network/test_config_flow.py +++ b/tests/components/devolo_home_network/test_config_flow.py @@ -139,7 +139,7 @@ async def test_abort_zeroconf_wrong_device(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("info") -async def test_abort_if_configued(hass: HomeAssistant) -> None: +async def test_abort_if_configured(hass: HomeAssistant) -> None: """Test we abort config flow if already configured.""" serial_number = DISCOVERY_INFO.properties["SN"] entry = MockConfigEntry( diff --git a/tests/components/devolo_home_network/test_update.py b/tests/components/devolo_home_network/test_update.py index 7f70524fa5b..4fe7a173309 100644 --- a/tests/components/devolo_home_network/test_update.py +++ b/tests/components/devolo_home_network/test_update.py @@ -141,7 +141,7 @@ async def test_device_failure_update( async def test_auth_failed(hass: HomeAssistant, mock_device: MockDevice) -> None: - """Test updating unautherized triggers the reauth flow.""" + """Test updating unauthorized triggers the reauth flow.""" entry = configure_integration(hass) device_name = entry.title.replace(" ", "_").lower() state_key = f"{PLATFORM}.{device_name}_firmware" From 067daad70eea56a457360e51199efa2f24476fd5 Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Fri, 13 Dec 2024 15:29:34 +0100 Subject: [PATCH 218/677] Set quality scale to silver for Powerfox integration (#133095) --- homeassistant/components/powerfox/manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/powerfox/manifest.json b/homeassistant/components/powerfox/manifest.json index a7285bb213f..7083ffe8de7 100644 --- a/homeassistant/components/powerfox/manifest.json +++ b/homeassistant/components/powerfox/manifest.json @@ -5,7 +5,7 @@ "config_flow": true, "documentation": 
"https://www.home-assistant.io/integrations/powerfox", "iot_class": "cloud_polling", - "quality_scale": "bronze", + "quality_scale": "silver", "requirements": ["powerfox==1.0.0"], "zeroconf": [ { From 8080ad14bffd4f975c1e2c6cf007891194fe1909 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 15:34:02 +0100 Subject: [PATCH 219/677] Add warning when light entities do not provide kelvin attributes or properties (#132723) --- homeassistant/components/light/__init__.py | 73 +++++++++++++++++++--- homeassistant/components/light/const.py | 5 ++ tests/components/light/common.py | 6 +- tests/components/light/test_init.py | 72 ++++++++++++++++++++- 4 files changed, 143 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/light/__init__.py b/homeassistant/components/light/__init__.py index 121732c918f..d4b38b498f3 100644 --- a/homeassistant/components/light/__init__.py +++ b/homeassistant/components/light/__init__.py @@ -32,6 +32,7 @@ from homeassistant.helpers.deprecation import ( ) from homeassistant.helpers.entity import ToggleEntity, ToggleEntityDescription from homeassistant.helpers.entity_component import EntityComponent +from homeassistant.helpers.frame import ReportBehavior, report_usage from homeassistant.helpers.typing import ConfigType, VolDictType from homeassistant.loader import bind_hass import homeassistant.util.color as color_util @@ -41,6 +42,8 @@ from .const import ( # noqa: F401 COLOR_MODES_COLOR, DATA_COMPONENT, DATA_PROFILES, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, DOMAIN, SCAN_INTERVAL, VALID_COLOR_MODES, @@ -863,17 +866,15 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): entity_description: LightEntityDescription _attr_brightness: int | None = None _attr_color_mode: ColorMode | str | None = None - _attr_color_temp: int | None = None _attr_color_temp_kelvin: int | None = None _attr_effect_list: list[str] | None = None _attr_effect: str | None = None 
_attr_hs_color: tuple[float, float] | None = None - # Default to the Philips Hue value that HA has always assumed - # https://developers.meethue.com/documentation/core-concepts + # We cannot set defaults without causing breaking changes until mireds + # are fully removed. Until then, developers can explicitly + # use DEFAULT_MIN_KELVIN and DEFAULT_MAX_KELVIN _attr_max_color_temp_kelvin: int | None = None _attr_min_color_temp_kelvin: int | None = None - _attr_max_mireds: int = 500 # 2000 K - _attr_min_mireds: int = 153 # 6500 K _attr_rgb_color: tuple[int, int, int] | None = None _attr_rgbw_color: tuple[int, int, int, int] | None = None _attr_rgbww_color: tuple[int, int, int, int, int] | None = None @@ -881,6 +882,11 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): _attr_supported_features: LightEntityFeature = LightEntityFeature(0) _attr_xy_color: tuple[float, float] | None = None + # Deprecated, see https://github.com/home-assistant/core/pull/79591 + _attr_color_temp: Final[int | None] = None + _attr_max_mireds: Final[int] = 500 # = 2000 K + _attr_min_mireds: Final[int] = 153 # = 6535.94 K (~ 6500 K) + __color_mode_reported = False @cached_property @@ -956,32 +962,70 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """Return the rgbww color value [int, int, int, int, int].""" return self._attr_rgbww_color + @final @cached_property def color_temp(self) -> int | None: - """Return the CT color value in mireds.""" + """Return the CT color value in mireds. 
+ + Deprecated, see https://github.com/home-assistant/core/pull/79591 + """ return self._attr_color_temp @property def color_temp_kelvin(self) -> int | None: """Return the CT color value in Kelvin.""" if self._attr_color_temp_kelvin is None and (color_temp := self.color_temp): + report_usage( + "is using mireds for current light color temperature, when " + "it should be adjusted to use the kelvin attribute " + "`_attr_color_temp_kelvin` or override the kelvin property " + "`color_temp_kelvin` (see " + "https://github.com/home-assistant/core/pull/79591)", + breaks_in_ha_version="2026.1", + core_behavior=ReportBehavior.LOG, + integration_domain=self.platform.platform_name + if self.platform + else None, + exclude_integrations={DOMAIN}, + ) return color_util.color_temperature_mired_to_kelvin(color_temp) return self._attr_color_temp_kelvin + @final @cached_property def min_mireds(self) -> int: - """Return the coldest color_temp that this light supports.""" + """Return the coldest color_temp that this light supports. + + Deprecated, see https://github.com/home-assistant/core/pull/79591 + """ return self._attr_min_mireds + @final @cached_property def max_mireds(self) -> int: - """Return the warmest color_temp that this light supports.""" + """Return the warmest color_temp that this light supports. 
+ + Deprecated, see https://github.com/home-assistant/core/pull/79591 + """ return self._attr_max_mireds @property def min_color_temp_kelvin(self) -> int: """Return the warmest color_temp_kelvin that this light supports.""" if self._attr_min_color_temp_kelvin is None: + report_usage( + "is using mireds for warmest light color temperature, when " + "it should be adjusted to use the kelvin attribute " + "`_attr_min_color_temp_kelvin` or override the kelvin property " + "`min_color_temp_kelvin`, possibly with default DEFAULT_MIN_KELVIN " + "(see https://github.com/home-assistant/core/pull/79591)", + breaks_in_ha_version="2026.1", + core_behavior=ReportBehavior.LOG, + integration_domain=self.platform.platform_name + if self.platform + else None, + exclude_integrations={DOMAIN}, + ) return color_util.color_temperature_mired_to_kelvin(self.max_mireds) return self._attr_min_color_temp_kelvin @@ -989,6 +1033,19 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): def max_color_temp_kelvin(self) -> int: """Return the coldest color_temp_kelvin that this light supports.""" if self._attr_max_color_temp_kelvin is None: + report_usage( + "is using mireds for coldest light color temperature, when " + "it should be adjusted to use the kelvin attribute " + "`_attr_max_color_temp_kelvin` or override the kelvin property " + "`max_color_temp_kelvin`, possibly with default DEFAULT_MAX_KELVIN " + "(see https://github.com/home-assistant/core/pull/79591)", + breaks_in_ha_version="2026.1", + core_behavior=ReportBehavior.LOG, + integration_domain=self.platform.platform_name + if self.platform + else None, + exclude_integrations={DOMAIN}, + ) return color_util.color_temperature_mired_to_kelvin(self.min_mireds) return self._attr_max_color_temp_kelvin diff --git a/homeassistant/components/light/const.py b/homeassistant/components/light/const.py index 19b8734038e..d27750a950d 100644 --- a/homeassistant/components/light/const.py +++ 
b/homeassistant/components/light/const.py @@ -66,3 +66,8 @@ COLOR_MODES_COLOR = { ColorMode.RGBWW, ColorMode.XY, } + +# Default to the Philips Hue value that HA has always assumed +# https://developers.meethue.com/documentation/core-concepts +DEFAULT_MIN_KELVIN = 2000 # 500 mireds +DEFAULT_MAX_KELVIN = 6535 # 153 mireds diff --git a/tests/components/light/common.py b/tests/components/light/common.py index d696c7ab8cf..b29ac0c7c89 100644 --- a/tests/components/light/common.py +++ b/tests/components/light/common.py @@ -21,6 +21,8 @@ from homeassistant.components.light import ( ATTR_TRANSITION, ATTR_WHITE, ATTR_XY_COLOR, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, DOMAIN, ColorMode, LightEntity, @@ -153,8 +155,8 @@ TURN_ON_ARG_TO_COLOR_MODE = { class MockLight(MockToggleEntity, LightEntity): """Mock light class.""" - _attr_max_color_temp_kelvin = 6500 - _attr_min_color_temp_kelvin = 2000 + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN supported_features = LightEntityFeature(0) brightness = None diff --git a/tests/components/light/test_init.py b/tests/components/light/test_init.py index bf09774073b..713ce553ae6 100644 --- a/tests/components/light/test_init.py +++ b/tests/components/light/test_init.py @@ -20,6 +20,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, Unauthorized +from homeassistant.helpers import frame from homeassistant.setup import async_setup_component import homeassistant.util.color as color_util @@ -1209,7 +1210,7 @@ async def test_light_state_off(hass: HomeAssistant) -> None: "hs_color": None, "rgb_color": None, "xy_color": None, - "max_color_temp_kelvin": 6500, + "max_color_temp_kelvin": 6535, "max_mireds": 500, "min_color_temp_kelvin": 2000, "min_mireds": 153, @@ -1842,7 +1843,7 @@ async def test_light_service_call_color_temp_conversion(hass: HomeAssistant) -> assert entity1.min_mireds == 153 assert 
entity1.max_mireds == 500 assert entity1.min_color_temp_kelvin == 2000 - assert entity1.max_color_temp_kelvin == 6500 + assert entity1.max_color_temp_kelvin == 6535 assert await async_setup_component(hass, "light", {"light": {"platform": "test"}}) await hass.async_block_till_done() @@ -1855,7 +1856,7 @@ async def test_light_service_call_color_temp_conversion(hass: HomeAssistant) -> assert state.attributes["min_mireds"] == 153 assert state.attributes["max_mireds"] == 500 assert state.attributes["min_color_temp_kelvin"] == 2000 - assert state.attributes["max_color_temp_kelvin"] == 6500 + assert state.attributes["max_color_temp_kelvin"] == 6535 state = hass.states.get(entity1.entity_id) assert state.attributes["supported_color_modes"] == [light.ColorMode.RGBWW] @@ -2547,6 +2548,71 @@ def test_report_invalid_color_modes( assert (expected_warning in caplog.text) is warning_expected +@pytest.mark.parametrize( + ("attributes", "expected_warnings", "expected_values"), + [ + ( + { + "_attr_color_temp_kelvin": 4000, + "_attr_min_color_temp_kelvin": 3000, + "_attr_max_color_temp_kelvin": 5000, + }, + {"current": False, "warmest": False, "coldest": False}, + # Just highlighting that the attributes match the + # converted kelvin values, not the mired properties + (3000, 4000, 5000, 200, 250, 333, 153, None, 500), + ), + ( + {"_attr_color_temp": 350, "_attr_min_mireds": 300, "_attr_max_mireds": 400}, + {"current": True, "warmest": True, "coldest": True}, + (2500, 2857, 3333, 300, 350, 400, 300, 350, 400), + ), + ( + {}, + {"current": False, "warmest": True, "coldest": True}, + (2000, None, 6535, 153, None, 500, 153, None, 500), + ), + ], + ids=["with_kelvin", "with_mired_values", "with_mired_defaults"], +) +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +def test_missing_kelvin_property_warnings( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + attributes: dict[str, int | None], + expected_warnings: dict[str, bool], + expected_values: tuple[int, int | None, 
int], +) -> None: + """Test missing kelvin properties.""" + + class MockLightEntityEntity(light.LightEntity): + _attr_color_mode = light.ColorMode.COLOR_TEMP + _attr_is_on = True + _attr_supported_features = light.LightEntityFeature.EFFECT + _attr_supported_color_modes = {light.ColorMode.COLOR_TEMP} + platform = MockEntityPlatform(hass, platform_name="test") + + entity = MockLightEntityEntity() + for k, v in attributes.items(): + setattr(entity, k, v) + + state = entity._async_calculate_state() + for warning, expected in expected_warnings.items(): + assert ( + f"is using mireds for {warning} light color temperature" in caplog.text + ) is expected, f"Expected {expected} for '{warning}'" + + assert state.attributes[light.ATTR_MIN_COLOR_TEMP_KELVIN] == expected_values[0] + assert state.attributes[light.ATTR_COLOR_TEMP_KELVIN] == expected_values[1] + assert state.attributes[light.ATTR_MAX_COLOR_TEMP_KELVIN] == expected_values[2] + assert state.attributes[light.ATTR_MIN_MIREDS] == expected_values[3] + assert state.attributes[light.ATTR_COLOR_TEMP] == expected_values[4] + assert state.attributes[light.ATTR_MAX_MIREDS] == expected_values[5] + assert entity.min_mireds == expected_values[6] + assert entity.color_temp == expected_values[7] + assert entity.max_mireds == expected_values[8] + + @pytest.mark.parametrize( "module", [light], From d6c81830a41d4904127725f33a338a80de8839ad Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Fri, 13 Dec 2024 15:42:40 +0100 Subject: [PATCH 220/677] Fix missing password for slide_local (#133142) --- homeassistant/components/slide_local/coordinator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/slide_local/coordinator.py b/homeassistant/components/slide_local/coordinator.py index c7542a4b813..e5311967198 100644 --- a/homeassistant/components/slide_local/coordinator.py +++ b/homeassistant/components/slide_local/coordinator.py @@ -47,7 +47,7 @@ class 
SlideCoordinator(DataUpdateCoordinator[dict[str, Any]]): self.api_version = entry.data[CONF_API_VERSION] self.mac = entry.data[CONF_MAC] self.host = entry.data[CONF_HOST] - self.password = entry.data[CONF_PASSWORD] + self.password = entry.data[CONF_PASSWORD] if self.api_version == 1 else "" async def _async_setup(self) -> None: """Do initialization logic for Slide coordinator.""" From 5f91676df07bd4b9ff355564f3018dfc6b99fbe3 Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Fri, 13 Dec 2024 16:02:13 +0100 Subject: [PATCH 221/677] Bump PyViCare to 2.38.0 (#133126) --- homeassistant/components/vicare/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/vicare/manifest.json b/homeassistant/components/vicare/manifest.json index 8ce996ab81d..0bb5594e829 100644 --- a/homeassistant/components/vicare/manifest.json +++ b/homeassistant/components/vicare/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/vicare", "iot_class": "cloud_polling", "loggers": ["PyViCare"], - "requirements": ["PyViCare==2.35.0"] + "requirements": ["PyViCare==2.38.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 219094c0a28..07261f2673f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -100,7 +100,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare==2.35.0 +PyViCare==2.38.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 46a7d4b29b0..4b39c915e97 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -94,7 +94,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare==2.35.0 +PyViCare==2.38.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 From 
f03f24f0361e93baa6d68971abff142c3e78ec05 Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Fri, 13 Dec 2024 16:05:20 +0100 Subject: [PATCH 222/677] Velbus test before setup (#133069) * Velbus test before setup * Update homeassistant/components/velbus/__init__.py Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> * Add the connect named argument to make it clear we are testing the connection * Correctly cleanup after the test * Sync code for velbusaio 2024.12.2 * follow up * rename connect_task to scan_task --------- Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/components/velbus/__init__.py | 18 +++++++++++------- .../components/velbus/binary_sensor.py | 2 +- homeassistant/components/velbus/button.py | 2 +- homeassistant/components/velbus/climate.py | 2 +- homeassistant/components/velbus/cover.py | 2 +- homeassistant/components/velbus/light.py | 2 +- .../components/velbus/quality_scale.yaml | 2 +- homeassistant/components/velbus/select.py | 2 +- homeassistant/components/velbus/sensor.py | 2 +- homeassistant/components/velbus/switch.py | 2 +- 10 files changed, 20 insertions(+), 16 deletions(-) diff --git a/homeassistant/components/velbus/__init__.py b/homeassistant/components/velbus/__init__.py index 6afcc20cc0f..ad1c35a124b 100644 --- a/homeassistant/components/velbus/__init__.py +++ b/homeassistant/components/velbus/__init__.py @@ -9,11 +9,12 @@ import os import shutil from velbusaio.controller import Velbus +from velbusaio.exceptions import VelbusConnectionFailed from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PORT, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import PlatformNotReady +from homeassistant.exceptions import ConfigEntryNotReady, PlatformNotReady from homeassistant.helpers import config_validation as cv, device_registry as dr from homeassistant.helpers.storage import STORAGE_DIR from homeassistant.helpers.typing import 
ConfigType @@ -44,13 +45,13 @@ class VelbusData: """Runtime data for the Velbus config entry.""" controller: Velbus - connect_task: asyncio.Task + scan_task: asyncio.Task -async def velbus_connect_task( +async def velbus_scan_task( controller: Velbus, hass: HomeAssistant, entry_id: str ) -> None: - """Task to offload the long running connect.""" + """Task to offload the long running scan.""" try: await controller.start() except ConnectionError as ex: @@ -85,10 +86,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: VelbusConfigEntry) -> bo entry.data[CONF_PORT], cache_dir=hass.config.path(STORAGE_DIR, f"velbuscache-{entry.entry_id}"), ) - await controller.connect() + try: + await controller.connect() + except VelbusConnectionFailed as error: + raise ConfigEntryNotReady("Cannot connect to Velbus") from error - task = hass.async_create_task(velbus_connect_task(controller, hass, entry.entry_id)) - entry.runtime_data = VelbusData(controller=controller, connect_task=task) + task = hass.async_create_task(velbus_scan_task(controller, hass, entry.entry_id)) + entry.runtime_data = VelbusData(controller=controller, scan_task=task) _migrate_device_identifiers(hass, entry.entry_id) diff --git a/homeassistant/components/velbus/binary_sensor.py b/homeassistant/components/velbus/binary_sensor.py index dd65ff7d50d..584f28e394a 100644 --- a/homeassistant/components/velbus/binary_sensor.py +++ b/homeassistant/components/velbus/binary_sensor.py @@ -16,7 +16,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await entry.runtime_data.connect_task + await entry.runtime_data.scan_task async_add_entities( VelbusBinarySensor(channel) for channel in entry.runtime_data.controller.get_all_binary_sensor() diff --git a/homeassistant/components/velbus/button.py b/homeassistant/components/velbus/button.py index 2b908c188b8..910ae59b69e 100644 --- a/homeassistant/components/velbus/button.py +++ 
b/homeassistant/components/velbus/button.py @@ -22,7 +22,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await entry.runtime_data.connect_task + await entry.runtime_data.scan_task async_add_entities( VelbusButton(channel) for channel in entry.runtime_data.controller.get_all_button() diff --git a/homeassistant/components/velbus/climate.py b/homeassistant/components/velbus/climate.py index fa8391d4199..e9128ef7de1 100644 --- a/homeassistant/components/velbus/climate.py +++ b/homeassistant/components/velbus/climate.py @@ -27,7 +27,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await entry.runtime_data.connect_task + await entry.runtime_data.scan_task async_add_entities( VelbusClimate(channel) for channel in entry.runtime_data.controller.get_all_climate() diff --git a/homeassistant/components/velbus/cover.py b/homeassistant/components/velbus/cover.py index 7850e7b1895..9257dd3f36f 100644 --- a/homeassistant/components/velbus/cover.py +++ b/homeassistant/components/velbus/cover.py @@ -24,7 +24,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await entry.runtime_data.connect_task + await entry.runtime_data.scan_task async_add_entities( VelbusCover(channel) for channel in entry.runtime_data.controller.get_all_cover() diff --git a/homeassistant/components/velbus/light.py b/homeassistant/components/velbus/light.py index 0df4f70d753..afe3104aa9a 100644 --- a/homeassistant/components/velbus/light.py +++ b/homeassistant/components/velbus/light.py @@ -35,7 +35,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await entry.runtime_data.connect_task + await entry.runtime_data.scan_task entities: list[Entity] = [ 
VelbusLight(channel) for channel in entry.runtime_data.controller.get_all_light() diff --git a/homeassistant/components/velbus/quality_scale.yaml b/homeassistant/components/velbus/quality_scale.yaml index ab2df68f973..37e55fee19c 100644 --- a/homeassistant/components/velbus/quality_scale.yaml +++ b/homeassistant/components/velbus/quality_scale.yaml @@ -25,7 +25,7 @@ rules: has-entity-name: todo runtime-data: done test-before-configure: done - test-before-setup: todo + test-before-setup: done unique-config-entry: status: todo comment: | diff --git a/homeassistant/components/velbus/select.py b/homeassistant/components/velbus/select.py index f0ad509270c..c0a0a5f532d 100644 --- a/homeassistant/components/velbus/select.py +++ b/homeassistant/components/velbus/select.py @@ -17,7 +17,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus select based on config_entry.""" - await entry.runtime_data.connect_task + await entry.runtime_data.scan_task async_add_entities( VelbusSelect(channel) for channel in entry.runtime_data.controller.get_all_select() diff --git a/homeassistant/components/velbus/sensor.py b/homeassistant/components/velbus/sensor.py index 598287839c1..2c341ea851d 100644 --- a/homeassistant/components/velbus/sensor.py +++ b/homeassistant/components/velbus/sensor.py @@ -22,7 +22,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await entry.runtime_data.connect_task + await entry.runtime_data.scan_task entities = [] for channel in entry.runtime_data.controller.get_all_sensor(): entities.append(VelbusSensor(channel)) diff --git a/homeassistant/components/velbus/switch.py b/homeassistant/components/velbus/switch.py index f3bd009d25e..dccb0a02ffa 100644 --- a/homeassistant/components/velbus/switch.py +++ b/homeassistant/components/velbus/switch.py @@ -18,7 +18,7 @@ async def async_setup_entry( async_add_entities: 
AddEntitiesCallback, ) -> None: """Set up Velbus switch based on config_entry.""" - await entry.runtime_data.connect_task + await entry.runtime_data.scan_task async_add_entities( VelbusSwitch(channel) for channel in entry.runtime_data.controller.get_all_switch() From 97da8481d282eea927dcc26fd36a0e75f9c42214 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Fri, 13 Dec 2024 16:11:45 +0100 Subject: [PATCH 223/677] Add reconfigure flow to MQTT (#132246) * Add reconfigure flow for MQTT integration * Add test and translation strings * Update quality scale configuration * Do not cache ConfigEntry in flow * Make sorce condition explictit * Rework from suggested changes * Do not allow reconfigure_entry and reconfigure_entry_data to be `None` --- homeassistant/components/mqtt/config_flow.py | 34 +++++++++-- .../components/mqtt/quality_scale.yaml | 4 +- homeassistant/components/mqtt/strings.json | 1 + tests/components/mqtt/test_config_flow.py | 56 +++++++++++++++++++ 4 files changed, 88 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/mqtt/config_flow.py b/homeassistant/components/mqtt/config_flow.py index 34d43ad87f3..ad3f3d35457 100644 --- a/homeassistant/components/mqtt/config_flow.py +++ b/homeassistant/components/mqtt/config_flow.py @@ -18,6 +18,7 @@ import voluptuous as vol from homeassistant.components.file_upload import process_uploaded_file from homeassistant.components.hassio import AddonError, AddonManager, AddonState from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, ConfigEntry, ConfigFlow, ConfigFlowResult, @@ -469,24 +470,41 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} fields: OrderedDict[Any, Any] = OrderedDict() validated_user_input: dict[str, Any] = {} + broker_config: dict[str, Any] = {} + if is_reconfigure := (self.source == SOURCE_RECONFIGURE): + reconfigure_entry = self._get_reconfigure_entry() if await async_get_broker_settings( self, fields, - None, + reconfigure_entry.data if 
is_reconfigure else None, user_input, validated_user_input, errors, ): + if is_reconfigure: + broker_config.update( + update_password_from_user_input( + reconfigure_entry.data.get(CONF_PASSWORD), validated_user_input + ), + ) + else: + broker_config = validated_user_input + can_connect = await self.hass.async_add_executor_job( try_connection, - validated_user_input, + broker_config, ) if can_connect: + if is_reconfigure: + return self.async_update_reload_and_abort( + reconfigure_entry, + data_updates=broker_config, + ) validated_user_input[CONF_DISCOVERY] = DEFAULT_DISCOVERY return self.async_create_entry( - title=validated_user_input[CONF_BROKER], - data=validated_user_input, + title=broker_config[CONF_BROKER], + data=broker_config, ) errors["base"] = "cannot_connect" @@ -495,6 +513,12 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): step_id="broker", data_schema=vol.Schema(fields), errors=errors ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a reconfiguration flow initialized by the user.""" + return await self.async_step_broker() + async def async_step_hassio( self, discovery_info: HassioServiceInfo ) -> ConfigFlowResult: @@ -547,7 +571,7 @@ class MQTTOptionsFlowHandler(OptionsFlow): def __init__(self) -> None: """Initialize MQTT options flow.""" - self.broker_config: dict[str, str | int] = {} + self.broker_config: dict[str, Any] = {} async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: """Manage the MQTT options.""" diff --git a/homeassistant/components/mqtt/quality_scale.yaml b/homeassistant/components/mqtt/quality_scale.yaml index d1730d8d2fe..f31d3e25d15 100644 --- a/homeassistant/components/mqtt/quality_scale.yaml +++ b/homeassistant/components/mqtt/quality_scale.yaml @@ -90,9 +90,9 @@ rules: This is not possible because the integrations generates entities based on a user supplied config or discovery. 
reconfiguration-flow: - status: exempt + status: done comment: > - This integration is reconfigured via options flow. + This integration can also be reconfigured via options flow. dynamic-devices: status: done comment: | diff --git a/homeassistant/components/mqtt/strings.json b/homeassistant/components/mqtt/strings.json index 4d23007e51b..c062c111487 100644 --- a/homeassistant/components/mqtt/strings.json +++ b/homeassistant/components/mqtt/strings.json @@ -101,6 +101,7 @@ "addon_connection_failed": "Failed to connect to the {addon} add-on. Check the add-on status and try again later.", "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" }, "error": { diff --git a/tests/components/mqtt/test_config_flow.py b/tests/components/mqtt/test_config_flow.py index e99063b088b..fc1221956de 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -2216,3 +2216,59 @@ async def test_change_websockets_transport_to_tcp( mqtt.CONF_DISCOVERY: True, mqtt.CONF_DISCOVERY_PREFIX: "homeassistant_test", } + + +@pytest.mark.usefixtures("mock_ssl_context", "mock_process_uploaded_file") +@pytest.mark.parametrize( + "mqtt_config_entry_data", + [ + { + mqtt.CONF_BROKER: "test-broker", + CONF_PORT: 1234, + mqtt.CONF_TRANSPORT: "websockets", + mqtt.CONF_WS_HEADERS: {"header_1": "custom_header1"}, + mqtt.CONF_WS_PATH: "/some_path", + } + ], +) +async def test_reconfigure_flow_form( + hass: HomeAssistant, + mock_try_connection: MagicMock, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test reconfigure flow.""" + await mqtt_mock_entry() + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + result = await 
hass.config_entries.flow.async_init( + mqtt.DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + "show_advanced_options": True, + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "broker" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + mqtt.CONF_BROKER: "10.10.10,10", + CONF_PORT: 1234, + mqtt.CONF_TRANSPORT: "websockets", + mqtt.CONF_WS_HEADERS: '{"header_1": "custom_header1"}', + mqtt.CONF_WS_PATH: "/some_new_path", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert entry.data == { + mqtt.CONF_BROKER: "10.10.10,10", + CONF_PORT: 1234, + mqtt.CONF_TRANSPORT: "websockets", + mqtt.CONF_WS_HEADERS: {"header_1": "custom_header1"}, + mqtt.CONF_WS_PATH: "/some_new_path", + } + await hass.async_block_till_done(wait_background_tasks=True) From 1fbe880c5fac6554128d4d2d4630c984adb8412c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 13 Dec 2024 16:52:47 +0100 Subject: [PATCH 224/677] Deprecate light constants (#132680) * Deprecate light constants * Reference deprecated values in MQTT light * Reference deprecated values in test_recorder * Adjust * Adjust * Add specific test --- homeassistant/components/light/__init__.py | 104 +++++++++++------- .../components/light/reproduce_state.py | 11 +- .../components/mqtt/light/schema_basic.py | 12 +- tests/components/light/test_init.py | 87 ++++++++++++++- tests/components/light/test_recorder.py | 12 +- 5 files changed, 168 insertions(+), 58 deletions(-) diff --git a/homeassistant/components/light/__init__.py b/homeassistant/components/light/__init__.py index d4b38b498f3..33bd259469b 100644 --- a/homeassistant/components/light/__init__.py +++ b/homeassistant/components/light/__init__.py @@ -186,16 +186,26 @@ ATTR_RGBW_COLOR = "rgbw_color" ATTR_RGBWW_COLOR = 
"rgbww_color" ATTR_XY_COLOR = "xy_color" ATTR_HS_COLOR = "hs_color" -ATTR_COLOR_TEMP = "color_temp" # Deprecated in HA Core 2022.11 -ATTR_KELVIN = "kelvin" # Deprecated in HA Core 2022.11 -ATTR_MIN_MIREDS = "min_mireds" # Deprecated in HA Core 2022.11 -ATTR_MAX_MIREDS = "max_mireds" # Deprecated in HA Core 2022.11 ATTR_COLOR_TEMP_KELVIN = "color_temp_kelvin" ATTR_MIN_COLOR_TEMP_KELVIN = "min_color_temp_kelvin" ATTR_MAX_COLOR_TEMP_KELVIN = "max_color_temp_kelvin" ATTR_COLOR_NAME = "color_name" ATTR_WHITE = "white" +# Deprecated in HA Core 2022.11 +_DEPRECATED_ATTR_COLOR_TEMP: Final = DeprecatedConstant( + "color_temp", "kelvin equivalent (ATTR_COLOR_TEMP_KELVIN)", "2026.1" +) +_DEPRECATED_ATTR_KELVIN: Final = DeprecatedConstant( + "kelvin", "ATTR_COLOR_TEMP_KELVIN", "2026.1" +) +_DEPRECATED_ATTR_MIN_MIREDS: Final = DeprecatedConstant( + "min_mireds", "kelvin equivalent (ATTR_MAX_COLOR_TEMP_KELVIN)", "2026.1" +) +_DEPRECATED_ATTR_MAX_MIREDS: Final = DeprecatedConstant( + "max_mireds", "kelvin equivalent (ATTR_MIN_COLOR_TEMP_KELVIN)", "2026.1" +) + # Brightness of the light, 0..255 or percentage ATTR_BRIGHTNESS = "brightness" ATTR_BRIGHTNESS_PCT = "brightness_pct" @@ -240,11 +250,11 @@ LIGHT_TURN_ON_SCHEMA: VolDictType = { vol.Exclusive(ATTR_BRIGHTNESS_STEP, ATTR_BRIGHTNESS): VALID_BRIGHTNESS_STEP, vol.Exclusive(ATTR_BRIGHTNESS_STEP_PCT, ATTR_BRIGHTNESS): VALID_BRIGHTNESS_STEP_PCT, vol.Exclusive(ATTR_COLOR_NAME, COLOR_GROUP): cv.string, - vol.Exclusive(ATTR_COLOR_TEMP, COLOR_GROUP): vol.All( + vol.Exclusive(_DEPRECATED_ATTR_COLOR_TEMP.value, COLOR_GROUP): vol.All( vol.Coerce(int), vol.Range(min=1) ), vol.Exclusive(ATTR_COLOR_TEMP_KELVIN, COLOR_GROUP): cv.positive_int, - vol.Exclusive(ATTR_KELVIN, COLOR_GROUP): cv.positive_int, + vol.Exclusive(_DEPRECATED_ATTR_KELVIN.value, COLOR_GROUP): cv.positive_int, vol.Exclusive(ATTR_HS_COLOR, COLOR_GROUP): vol.All( vol.Coerce(tuple), vol.ExactSequence( @@ -307,19 +317,29 @@ def preprocess_turn_on_alternatives( 
_LOGGER.warning("Got unknown color %s, falling back to white", color_name) params[ATTR_RGB_COLOR] = (255, 255, 255) - if (mired := params.pop(ATTR_COLOR_TEMP, None)) is not None: + if (mired := params.pop(_DEPRECATED_ATTR_COLOR_TEMP.value, None)) is not None: + _LOGGER.warning( + "Got `color_temp` argument in `turn_on` service, which is deprecated " + "and will break in Home Assistant 2026.1, please use " + "`color_temp_kelvin` argument" + ) kelvin = color_util.color_temperature_mired_to_kelvin(mired) - params[ATTR_COLOR_TEMP] = int(mired) + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = int(mired) params[ATTR_COLOR_TEMP_KELVIN] = int(kelvin) - if (kelvin := params.pop(ATTR_KELVIN, None)) is not None: + if (kelvin := params.pop(_DEPRECATED_ATTR_KELVIN.value, None)) is not None: + _LOGGER.warning( + "Got `kelvin` argument in `turn_on` service, which is deprecated " + "and will break in Home Assistant 2026.1, please use " + "`color_temp_kelvin` argument" + ) mired = color_util.color_temperature_kelvin_to_mired(kelvin) - params[ATTR_COLOR_TEMP] = int(mired) + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = int(mired) params[ATTR_COLOR_TEMP_KELVIN] = int(kelvin) if (kelvin := params.pop(ATTR_COLOR_TEMP_KELVIN, None)) is not None: mired = color_util.color_temperature_kelvin_to_mired(kelvin) - params[ATTR_COLOR_TEMP] = int(mired) + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = int(mired) params[ATTR_COLOR_TEMP_KELVIN] = int(kelvin) brightness_pct = params.pop(ATTR_BRIGHTNESS_PCT, None) @@ -361,7 +381,7 @@ def filter_turn_on_params(light: LightEntity, params: dict[str, Any]) -> dict[st if not brightness_supported(supported_color_modes): params.pop(ATTR_BRIGHTNESS, None) if ColorMode.COLOR_TEMP not in supported_color_modes: - params.pop(ATTR_COLOR_TEMP, None) + params.pop(_DEPRECATED_ATTR_COLOR_TEMP.value, None) params.pop(ATTR_COLOR_TEMP_KELVIN, None) if ColorMode.HS not in supported_color_modes: params.pop(ATTR_HS_COLOR, None) @@ -443,7 +463,7 @@ async def async_setup(hass: 
HomeAssistant, config: ConfigType) -> bool: # noqa: and ColorMode.COLOR_TEMP not in supported_color_modes and ColorMode.RGBWW in supported_color_modes ): - params.pop(ATTR_COLOR_TEMP) + params.pop(_DEPRECATED_ATTR_COLOR_TEMP.value) color_temp = params.pop(ATTR_COLOR_TEMP_KELVIN) brightness = params.get(ATTR_BRIGHTNESS, light.brightness) params[ATTR_RGBWW_COLOR] = color_util.color_temperature_to_rgbww( @@ -453,7 +473,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: light.max_color_temp_kelvin, ) elif ColorMode.COLOR_TEMP not in legacy_supported_color_modes: - params.pop(ATTR_COLOR_TEMP) + params.pop(_DEPRECATED_ATTR_COLOR_TEMP.value) color_temp = params.pop(ATTR_COLOR_TEMP_KELVIN) if color_supported(legacy_supported_color_modes): params[ATTR_HS_COLOR] = color_util.color_temperature_to_hs( @@ -500,8 +520,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( *xy_color ) - params[ATTR_COLOR_TEMP] = color_util.color_temperature_kelvin_to_mired( - params[ATTR_COLOR_TEMP_KELVIN] + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = ( + color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) ) elif ATTR_RGB_COLOR in params and ColorMode.RGB not in supported_color_modes: rgb_color = params.pop(ATTR_RGB_COLOR) @@ -523,8 +545,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( *xy_color ) - params[ATTR_COLOR_TEMP] = color_util.color_temperature_kelvin_to_mired( - params[ATTR_COLOR_TEMP_KELVIN] + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = ( + color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) ) elif ATTR_XY_COLOR in params and ColorMode.XY not in supported_color_modes: xy_color = params.pop(ATTR_XY_COLOR) @@ -544,8 +568,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> 
bool: # noqa: params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( *xy_color ) - params[ATTR_COLOR_TEMP] = color_util.color_temperature_kelvin_to_mired( - params[ATTR_COLOR_TEMP_KELVIN] + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = ( + color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) ) elif ATTR_RGBW_COLOR in params and ColorMode.RGBW not in supported_color_modes: rgbw_color = params.pop(ATTR_RGBW_COLOR) @@ -565,8 +591,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( *xy_color ) - params[ATTR_COLOR_TEMP] = color_util.color_temperature_kelvin_to_mired( - params[ATTR_COLOR_TEMP_KELVIN] + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = ( + color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) ) elif ( ATTR_RGBWW_COLOR in params and ColorMode.RGBWW not in supported_color_modes @@ -589,8 +617,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: params[ATTR_COLOR_TEMP_KELVIN] = color_util.color_xy_to_temperature( *xy_color ) - params[ATTR_COLOR_TEMP] = color_util.color_temperature_kelvin_to_mired( - params[ATTR_COLOR_TEMP_KELVIN] + params[_DEPRECATED_ATTR_COLOR_TEMP.value] = ( + color_util.color_temperature_kelvin_to_mired( + params[ATTR_COLOR_TEMP_KELVIN] + ) ) # If white is set to True, set it to the light's brightness @@ -798,7 +828,7 @@ class Profiles: color_attributes = ( ATTR_COLOR_NAME, - ATTR_COLOR_TEMP, + _DEPRECATED_ATTR_COLOR_TEMP.value, ATTR_HS_COLOR, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, @@ -846,13 +876,13 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): { ATTR_SUPPORTED_COLOR_MODES, ATTR_EFFECT_LIST, - ATTR_MIN_MIREDS, - ATTR_MAX_MIREDS, + _DEPRECATED_ATTR_MIN_MIREDS.value, + _DEPRECATED_ATTR_MAX_MIREDS.value, ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_MAX_COLOR_TEMP_KELVIN, ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, 
+ _DEPRECATED_ATTR_COLOR_TEMP.value, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, @@ -1072,16 +1102,16 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): data[ATTR_MIN_COLOR_TEMP_KELVIN] = min_color_temp_kelvin data[ATTR_MAX_COLOR_TEMP_KELVIN] = max_color_temp_kelvin if not max_color_temp_kelvin: - data[ATTR_MIN_MIREDS] = None + data[_DEPRECATED_ATTR_MIN_MIREDS.value] = None else: - data[ATTR_MIN_MIREDS] = color_util.color_temperature_kelvin_to_mired( - max_color_temp_kelvin + data[_DEPRECATED_ATTR_MIN_MIREDS.value] = ( + color_util.color_temperature_kelvin_to_mired(max_color_temp_kelvin) ) if not min_color_temp_kelvin: - data[ATTR_MAX_MIREDS] = None + data[_DEPRECATED_ATTR_MAX_MIREDS.value] = None else: - data[ATTR_MAX_MIREDS] = color_util.color_temperature_kelvin_to_mired( - min_color_temp_kelvin + data[_DEPRECATED_ATTR_MAX_MIREDS.value] = ( + color_util.color_temperature_kelvin_to_mired(min_color_temp_kelvin) ) if LightEntityFeature.EFFECT in supported_features: data[ATTR_EFFECT_LIST] = self.effect_list @@ -1254,14 +1284,14 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): color_temp_kelvin = self.color_temp_kelvin data[ATTR_COLOR_TEMP_KELVIN] = color_temp_kelvin if color_temp_kelvin: - data[ATTR_COLOR_TEMP] = ( + data[_DEPRECATED_ATTR_COLOR_TEMP.value] = ( color_util.color_temperature_kelvin_to_mired(color_temp_kelvin) ) else: - data[ATTR_COLOR_TEMP] = None + data[_DEPRECATED_ATTR_COLOR_TEMP.value] = None else: data[ATTR_COLOR_TEMP_KELVIN] = None - data[ATTR_COLOR_TEMP] = None + data[_DEPRECATED_ATTR_COLOR_TEMP.value] = None if color_supported(legacy_supported_color_modes) or color_temp_supported( legacy_supported_color_modes diff --git a/homeassistant/components/light/reproduce_state.py b/homeassistant/components/light/reproduce_state.py index a89209eb426..4e994ab791d 100644 --- a/homeassistant/components/light/reproduce_state.py +++ b/homeassistant/components/light/reproduce_state.py @@ 
-18,9 +18,9 @@ from homeassistant.core import Context, HomeAssistant, State from homeassistant.util import color as color_util from . import ( + _DEPRECATED_ATTR_COLOR_TEMP, ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, @@ -41,7 +41,7 @@ ATTR_GROUP = [ATTR_BRIGHTNESS, ATTR_EFFECT] COLOR_GROUP = [ ATTR_HS_COLOR, - ATTR_COLOR_TEMP, + _DEPRECATED_ATTR_COLOR_TEMP.value, ATTR_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, @@ -129,7 +129,12 @@ async def _async_reproduce_state( if (cm_attr_state := state.attributes.get(cm_attr.state_attr)) is None: if ( color_mode != ColorMode.COLOR_TEMP - or (mireds := state.attributes.get(ATTR_COLOR_TEMP)) is None + or ( + mireds := state.attributes.get( + _DEPRECATED_ATTR_COLOR_TEMP.value + ) + ) + is None ): _LOGGER.warning( "Color mode %s specified but attribute %s missing for: %s", diff --git a/homeassistant/components/mqtt/light/schema_basic.py b/homeassistant/components/mqtt/light/schema_basic.py index 9cc50daa329..635c552f37e 100644 --- a/homeassistant/components/mqtt/light/schema_basic.py +++ b/homeassistant/components/mqtt/light/schema_basic.py @@ -9,17 +9,17 @@ from typing import Any, cast import voluptuous as vol from homeassistant.components.light import ( + _DEPRECATED_ATTR_COLOR_TEMP, + _DEPRECATED_ATTR_MAX_MIREDS, + _DEPRECATED_ATTR_MIN_MIREDS, ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, ATTR_HS_COLOR, ATTR_MAX_COLOR_TEMP_KELVIN, - ATTR_MAX_MIREDS, ATTR_MIN_COLOR_TEMP_KELVIN, - ATTR_MIN_MIREDS, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -115,15 +115,15 @@ MQTT_LIGHT_ATTRIBUTES_BLOCKED = frozenset( { ATTR_COLOR_MODE, ATTR_BRIGHTNESS, - ATTR_COLOR_TEMP, + _DEPRECATED_ATTR_COLOR_TEMP.value, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, ATTR_HS_COLOR, ATTR_MAX_COLOR_TEMP_KELVIN, - ATTR_MAX_MIREDS, + _DEPRECATED_ATTR_MAX_MIREDS.value, ATTR_MIN_COLOR_TEMP_KELVIN, - ATTR_MIN_MIREDS, 
+ _DEPRECATED_ATTR_MIN_MIREDS.value, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, diff --git a/tests/components/light/test_init.py b/tests/components/light/test_init.py index 713ce553ae6..303bf68f68c 100644 --- a/tests/components/light/test_init.py +++ b/tests/components/light/test_init.py @@ -2623,17 +2623,34 @@ def test_all(module: ModuleType) -> None: @pytest.mark.parametrize( - ("constant_name", "constant_value"), - [("SUPPORT_BRIGHTNESS", 1), ("SUPPORT_COLOR_TEMP", 2), ("SUPPORT_COLOR", 16)], + ("constant_name", "constant_value", "constant_replacement"), + [ + ("SUPPORT_BRIGHTNESS", 1, "supported_color_modes"), + ("SUPPORT_COLOR_TEMP", 2, "supported_color_modes"), + ("SUPPORT_COLOR", 16, "supported_color_modes"), + ("ATTR_COLOR_TEMP", "color_temp", "kelvin equivalent (ATTR_COLOR_TEMP_KELVIN)"), + ("ATTR_KELVIN", "kelvin", "ATTR_COLOR_TEMP_KELVIN"), + ( + "ATTR_MIN_MIREDS", + "min_mireds", + "kelvin equivalent (ATTR_MAX_COLOR_TEMP_KELVIN)", + ), + ( + "ATTR_MAX_MIREDS", + "max_mireds", + "kelvin equivalent (ATTR_MIN_COLOR_TEMP_KELVIN)", + ), + ], ) -def test_deprecated_support_light_constants( +def test_deprecated_light_constants( caplog: pytest.LogCaptureFixture, constant_name: str, - constant_value: int, + constant_value: int | str, + constant_replacement: str, ) -> None: - """Test deprecated format constants.""" + """Test deprecated light constants.""" import_and_test_deprecated_constant( - caplog, light, constant_name, "supported_color_modes", constant_value, "2026.1" + caplog, light, constant_name, constant_replacement, constant_value, "2026.1" ) @@ -2663,3 +2680,61 @@ def test_deprecated_color_mode_constants_enums( import_and_test_deprecated_constant_enum( caplog, light, entity_feature, "COLOR_MODE_", "2026.1" ) + + +async def test_deprecated_turn_on_arguments( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test color temp conversion in service calls.""" + entity = MockLight("Test_ct", STATE_ON, 
{light.ColorMode.COLOR_TEMP}) + setup_test_component_platform(hass, light.DOMAIN, [entity]) + + assert await async_setup_component( + hass, light.DOMAIN, {light.DOMAIN: {"platform": "test"}} + ) + await hass.async_block_till_done() + + state = hass.states.get(entity.entity_id) + assert state.attributes["supported_color_modes"] == [light.ColorMode.COLOR_TEMP] + + caplog.clear() + await hass.services.async_call( + "light", + "turn_on", + { + "entity_id": [entity.entity_id], + "color_temp": 200, + }, + blocking=True, + ) + assert "Got `color_temp` argument in `turn_on` service" in caplog.text + _, data = entity.last_call("turn_on") + assert data == {"color_temp": 200, "color_temp_kelvin": 5000} + + caplog.clear() + await hass.services.async_call( + "light", + "turn_on", + { + "entity_id": [entity.entity_id], + "kelvin": 5000, + }, + blocking=True, + ) + assert "Got `kelvin` argument in `turn_on` service" in caplog.text + _, data = entity.last_call("turn_on") + assert data == {"color_temp": 200, "color_temp_kelvin": 5000} + + caplog.clear() + await hass.services.async_call( + "light", + "turn_on", + { + "entity_id": [entity.entity_id], + "color_temp_kelvin": 5000, + }, + blocking=True, + ) + _, data = entity.last_call("turn_on") + assert data == {"color_temp": 200, "color_temp_kelvin": 5000} + assert "argument in `turn_on` service" not in caplog.text diff --git a/tests/components/light/test_recorder.py b/tests/components/light/test_recorder.py index f3f87ff6074..d53ece61170 100644 --- a/tests/components/light/test_recorder.py +++ b/tests/components/light/test_recorder.py @@ -9,17 +9,17 @@ import pytest from homeassistant.components import light from homeassistant.components.light import ( + _DEPRECATED_ATTR_COLOR_TEMP, + _DEPRECATED_ATTR_MAX_MIREDS, + _DEPRECATED_ATTR_MIN_MIREDS, ATTR_BRIGHTNESS, ATTR_COLOR_MODE, - ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_EFFECT_LIST, ATTR_HS_COLOR, ATTR_MAX_COLOR_TEMP_KELVIN, - ATTR_MAX_MIREDS, 
ATTR_MIN_COLOR_TEMP_KELVIN, - ATTR_MIN_MIREDS, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, @@ -66,8 +66,8 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant) assert len(states) >= 1 for entity_states in states.values(): for state in entity_states: - assert ATTR_MIN_MIREDS not in state.attributes - assert ATTR_MAX_MIREDS not in state.attributes + assert _DEPRECATED_ATTR_MIN_MIREDS.value not in state.attributes + assert _DEPRECATED_ATTR_MAX_MIREDS.value not in state.attributes assert ATTR_SUPPORTED_COLOR_MODES not in state.attributes assert ATTR_EFFECT_LIST not in state.attributes assert ATTR_FRIENDLY_NAME in state.attributes @@ -75,7 +75,7 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant) assert ATTR_MIN_COLOR_TEMP_KELVIN not in state.attributes assert ATTR_BRIGHTNESS not in state.attributes assert ATTR_COLOR_MODE not in state.attributes - assert ATTR_COLOR_TEMP not in state.attributes + assert _DEPRECATED_ATTR_COLOR_TEMP.value not in state.attributes assert ATTR_COLOR_TEMP_KELVIN not in state.attributes assert ATTR_EFFECT not in state.attributes assert ATTR_HS_COLOR not in state.attributes From a812b594aac3f274b9ba660b7d778e62d8b9d389 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 13 Dec 2024 16:55:30 +0100 Subject: [PATCH 225/677] Fix Tailwind config entry typing in async_unload_entry signature (#133153) --- homeassistant/components/tailwind/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/homeassistant/components/tailwind/__init__.py b/homeassistant/components/tailwind/__init__.py index c48f5344763..b191d78f2a6 100644 --- a/homeassistant/components/tailwind/__init__.py +++ b/homeassistant/components/tailwind/__init__.py @@ -2,7 +2,6 @@ from __future__ import annotations -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import 
device_registry as dr @@ -38,6 +37,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TailwindConfigEntry) -> return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: TailwindConfigEntry) -> bool: """Unload Tailwind config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) From 8b6495f456bf60252a9444d75db89efe5b50b781 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Fri, 13 Dec 2024 19:06:44 +0100 Subject: [PATCH 226/677] Bump ruff to 0.8.3 (#133163) --- .pre-commit-config.yaml | 2 +- requirements_test_pre_commit.txt | 2 +- script/hassfest/docker/Dockerfile | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5d65225f512..6ecae762dcd 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.2 + rev: v0.8.3 hooks: - id: ruff args: diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index aa04dbeb6d0..dcddf267eb4 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.8.2 +ruff==0.8.3 yamllint==1.35.1 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index a4f33c3ad40..369beb538ed 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -22,7 +22,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.8,source=/uv,target=/bin/uv \ --no-cache \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ - stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.2 \ + stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.3 
\ PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.9 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" From e13fa8346a481fcf452ec89ff7d9d8fc6eb59b61 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 13 Dec 2024 20:15:05 +0100 Subject: [PATCH 227/677] Update debugpy to 1.8.11 (#133169) --- homeassistant/components/debugpy/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/debugpy/manifest.json b/homeassistant/components/debugpy/manifest.json index c6e7f79be49..078af8c67a5 100644 --- a/homeassistant/components/debugpy/manifest.json +++ b/homeassistant/components/debugpy/manifest.json @@ -6,5 +6,5 @@ "integration_type": "service", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["debugpy==1.8.8"] + "requirements": ["debugpy==1.8.11"] } diff --git a/requirements_all.txt b/requirements_all.txt index 07261f2673f..3fab70ecab3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -730,7 +730,7 @@ datapoint==0.9.9 dbus-fast==2.24.3 # homeassistant.components.debugpy -debugpy==1.8.8 +debugpy==1.8.11 # homeassistant.components.decora_wifi # decora-wifi==1.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4b39c915e97..06fd689a0ff 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -626,7 +626,7 @@ datapoint==0.9.9 dbus-fast==2.24.3 # homeassistant.components.debugpy -debugpy==1.8.8 +debugpy==1.8.11 # homeassistant.components.ecovacs deebot-client==9.4.0 From 50b897bdaa780ed11a7b947ec898531584195b12 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Fri, 13 Dec 2024 13:59:46 -0600 Subject: [PATCH 228/677] Add STT error code for cloud authentication failure (#133170) --- .../components/assist_pipeline/pipeline.py | 6 +++ .../assist_pipeline/snapshots/test_init.ambr | 36 ++++++++++++++++ 
tests/components/assist_pipeline/test_init.py | 41 +++++++++++++++++++ 3 files changed, 83 insertions(+) diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index f8f6be3a40f..7dda24c4023 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -16,6 +16,7 @@ import time from typing import Any, Literal, cast import wave +import hass_nabucasa import voluptuous as vol from homeassistant.components import ( @@ -918,6 +919,11 @@ class PipelineRun: ) except (asyncio.CancelledError, TimeoutError): raise # expected + except hass_nabucasa.auth.Unauthenticated as src_error: + raise SpeechToTextError( + code="cloud-auth-failed", + message="Home Assistant Cloud authentication failed", + ) from src_error except Exception as src_error: _LOGGER.exception("Unexpected error during speech-to-text") raise SpeechToTextError( diff --git a/tests/components/assist_pipeline/snapshots/test_init.ambr b/tests/components/assist_pipeline/snapshots/test_init.ambr index d3241b8ac1f..f63a28efbb7 100644 --- a/tests/components/assist_pipeline/snapshots/test_init.ambr +++ b/tests/components/assist_pipeline/snapshots/test_init.ambr @@ -387,6 +387,42 @@ }), ]) # --- +# name: test_pipeline_from_audio_stream_with_cloud_auth_fail + list([ + dict({ + 'data': dict({ + 'language': 'en', + 'pipeline': , + }), + 'type': , + }), + dict({ + 'data': dict({ + 'engine': 'stt.mock_stt', + 'metadata': dict({ + 'bit_rate': , + 'channel': , + 'codec': , + 'format': , + 'language': 'en-US', + 'sample_rate': , + }), + }), + 'type': , + }), + dict({ + 'data': dict({ + 'code': 'cloud-auth-failed', + 'message': 'Home Assistant Cloud authentication failed', + }), + 'type': , + }), + dict({ + 'data': None, + 'type': , + }), + ]) +# --- # name: test_pipeline_language_used_instead_of_conversation_language list([ dict({ diff --git a/tests/components/assist_pipeline/test_init.py 
b/tests/components/assist_pipeline/test_init.py index a3e65766c34..d4cce4e2e98 100644 --- a/tests/components/assist_pipeline/test_init.py +++ b/tests/components/assist_pipeline/test_init.py @@ -8,6 +8,7 @@ import tempfile from unittest.mock import ANY, patch import wave +import hass_nabucasa import pytest from syrupy.assertion import SnapshotAssertion @@ -1173,3 +1174,43 @@ async def test_pipeline_language_used_instead_of_conversation_language( mock_async_converse.call_args_list[0].kwargs.get("language") == pipeline.language ) + + +async def test_pipeline_from_audio_stream_with_cloud_auth_fail( + hass: HomeAssistant, + mock_stt_provider_entity: MockSTTProviderEntity, + init_components, + snapshot: SnapshotAssertion, +) -> None: + """Test creating a pipeline from an audio stream but the cloud authentication fails.""" + + events: list[assist_pipeline.PipelineEvent] = [] + + async def audio_data(): + yield b"audio" + + with patch.object( + mock_stt_provider_entity, + "async_process_audio_stream", + side_effect=hass_nabucasa.auth.Unauthenticated, + ): + await assist_pipeline.async_pipeline_from_audio_stream( + hass, + context=Context(), + event_callback=events.append, + stt_metadata=stt.SpeechMetadata( + language="", + format=stt.AudioFormats.WAV, + codec=stt.AudioCodecs.PCM, + bit_rate=stt.AudioBitRates.BITRATE_16, + sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, + channel=stt.AudioChannels.CHANNEL_MONO, + ), + stt_stream=audio_data(), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), + ) + + assert process_events(events) == snapshot + assert len(events) == 4 # run start, stt start, error, run end + assert events[2].type == assist_pipeline.PipelineEventType.ERROR + assert events[2].data["code"] == "cloud-auth-failed" From f06fda80234a8ac429dc4216ee4ddd7758d71e96 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Fri, 13 Dec 2024 14:19:43 -0600 Subject: [PATCH 229/677] Add response slot to HassRespond intent (#133162) --- 
homeassistant/components/intent/__init__.py | 16 +++++++++++++--- tests/components/intent/test_init.py | 11 +++++++++++ 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/intent/__init__.py b/homeassistant/components/intent/__init__.py index 1ffb8747d91..71ef40ad369 100644 --- a/homeassistant/components/intent/__init__.py +++ b/homeassistant/components/intent/__init__.py @@ -139,7 +139,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: intent.async_register(hass, TimerStatusIntentHandler()) intent.async_register(hass, GetCurrentDateIntentHandler()) intent.async_register(hass, GetCurrentTimeIntentHandler()) - intent.async_register(hass, HelloIntentHandler()) + intent.async_register(hass, RespondIntentHandler()) return True @@ -423,15 +423,25 @@ class GetCurrentTimeIntentHandler(intent.IntentHandler): return response -class HelloIntentHandler(intent.IntentHandler): +class RespondIntentHandler(intent.IntentHandler): """Responds with no action.""" intent_type = intent.INTENT_RESPOND description = "Returns the provided response with no action." 
+ slot_schema = { + vol.Optional("response"): cv.string, + } + async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: """Return the provided response, but take no action.""" - return intent_obj.create_response() + slots = self.async_validate_slots(intent_obj.slots) + response = intent_obj.create_response() + + if "response" in slots: + response.async_set_speech(slots["response"]["value"]) + + return response async def _async_process_intent( diff --git a/tests/components/intent/test_init.py b/tests/components/intent/test_init.py index 20c0f9d8d44..0db9682d0ad 100644 --- a/tests/components/intent/test_init.py +++ b/tests/components/intent/test_init.py @@ -466,3 +466,14 @@ async def test_intents_with_no_responses(hass: HomeAssistant) -> None: for intent_name in (intent.INTENT_NEVERMIND, intent.INTENT_RESPOND): response = await intent.async_handle(hass, "test", intent_name, {}) assert not response.speech + + +async def test_intents_respond_intent(hass: HomeAssistant) -> None: + """Test HassRespond intent with a response slot value.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "intent", {}) + + response = await intent.async_handle( + hass, "test", intent.INTENT_RESPOND, {"response": {"value": "Hello World"}} + ) + assert response.speech["plain"]["speech"] == "Hello World" From 0c8db8c8d6e0049cdf830fd176ed1c07c8a78712 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Fri, 13 Dec 2024 22:29:18 +0100 Subject: [PATCH 230/677] Add eheimdigital integration (#126757) Co-authored-by: Franck Nijhof --- .strict-typing | 1 + CODEOWNERS | 2 + .../components/eheimdigital/__init__.py | 51 +++ .../components/eheimdigital/config_flow.py | 127 +++++++ .../components/eheimdigital/const.py | 17 + .../components/eheimdigital/coordinator.py | 78 +++++ .../components/eheimdigital/entity.py | 53 +++ .../components/eheimdigital/light.py | 127 +++++++ 
.../components/eheimdigital/manifest.json | 15 + .../eheimdigital/quality_scale.yaml | 70 ++++ .../components/eheimdigital/strings.json | 39 +++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + homeassistant/generated/zeroconf.py | 4 + mypy.ini | 10 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/eheimdigital/__init__.py | 1 + tests/components/eheimdigital/conftest.py | 58 ++++ .../eheimdigital/snapshots/test_light.ambr | 316 ++++++++++++++++++ .../eheimdigital/test_config_flow.py | 212 ++++++++++++ tests/components/eheimdigital/test_init.py | 55 +++ tests/components/eheimdigital/test_light.py | 249 ++++++++++++++ 23 files changed, 1498 insertions(+) create mode 100644 homeassistant/components/eheimdigital/__init__.py create mode 100644 homeassistant/components/eheimdigital/config_flow.py create mode 100644 homeassistant/components/eheimdigital/const.py create mode 100644 homeassistant/components/eheimdigital/coordinator.py create mode 100644 homeassistant/components/eheimdigital/entity.py create mode 100644 homeassistant/components/eheimdigital/light.py create mode 100644 homeassistant/components/eheimdigital/manifest.json create mode 100644 homeassistant/components/eheimdigital/quality_scale.yaml create mode 100644 homeassistant/components/eheimdigital/strings.json create mode 100644 tests/components/eheimdigital/__init__.py create mode 100644 tests/components/eheimdigital/conftest.py create mode 100644 tests/components/eheimdigital/snapshots/test_light.ambr create mode 100644 tests/components/eheimdigital/test_config_flow.py create mode 100644 tests/components/eheimdigital/test_init.py create mode 100644 tests/components/eheimdigital/test_light.py diff --git a/.strict-typing b/.strict-typing index ade5d6afb7b..66dae130fb5 100644 --- a/.strict-typing +++ b/.strict-typing @@ -170,6 +170,7 @@ homeassistant.components.easyenergy.* homeassistant.components.ecovacs.* 
homeassistant.components.ecowitt.* homeassistant.components.efergy.* +homeassistant.components.eheimdigital.* homeassistant.components.electrasmart.* homeassistant.components.electric_kiwi.* homeassistant.components.elevenlabs.* diff --git a/CODEOWNERS b/CODEOWNERS index afd150ffb0c..06eb70c7576 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -387,6 +387,8 @@ build.json @home-assistant/supervisor /homeassistant/components/efergy/ @tkdrob /tests/components/efergy/ @tkdrob /homeassistant/components/egardia/ @jeroenterheerdt +/homeassistant/components/eheimdigital/ @autinerd +/tests/components/eheimdigital/ @autinerd /homeassistant/components/electrasmart/ @jafar-atili /tests/components/electrasmart/ @jafar-atili /homeassistant/components/electric_kiwi/ @mikey0000 diff --git a/homeassistant/components/eheimdigital/__init__.py b/homeassistant/components/eheimdigital/__init__.py new file mode 100644 index 00000000000..cf08f45bed5 --- /dev/null +++ b/homeassistant/components/eheimdigital/__init__.py @@ -0,0 +1,51 @@ +"""The EHEIM Digital integration.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceEntry + +from .const import DOMAIN +from .coordinator import EheimDigitalUpdateCoordinator + +PLATFORMS = [Platform.LIGHT] + +type EheimDigitalConfigEntry = ConfigEntry[EheimDigitalUpdateCoordinator] + + +async def async_setup_entry( + hass: HomeAssistant, entry: EheimDigitalConfigEntry +) -> bool: + """Set up EHEIM Digital from a config entry.""" + + coordinator = EheimDigitalUpdateCoordinator(hass) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry( + hass: HomeAssistant, entry: EheimDigitalConfigEntry +) -> bool: + """Unload a 
config entry.""" + await entry.runtime_data.hub.close() + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def async_remove_config_entry_device( + hass: HomeAssistant, + config_entry: EheimDigitalConfigEntry, + device_entry: DeviceEntry, +) -> bool: + """Remove a config entry from a device.""" + return not any( + identifier + for identifier in device_entry.identifiers + if identifier[0] == DOMAIN + and identifier[1] in config_entry.runtime_data.hub.devices + ) diff --git a/homeassistant/components/eheimdigital/config_flow.py b/homeassistant/components/eheimdigital/config_flow.py new file mode 100644 index 00000000000..6994c6f65b5 --- /dev/null +++ b/homeassistant/components/eheimdigital/config_flow.py @@ -0,0 +1,127 @@ +"""Config flow for EHEIM Digital.""" + +from __future__ import annotations + +import asyncio +from typing import TYPE_CHECKING, Any + +from aiohttp import ClientError +from eheimdigital.device import EheimDigitalDevice +from eheimdigital.hub import EheimDigitalHub +import voluptuous as vol + +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import SOURCE_USER, ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_HOST +from homeassistant.helpers import selector +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .const import DOMAIN, LOGGER + +CONFIG_SCHEMA = vol.Schema( + {vol.Required(CONF_HOST, default="eheimdigital.local"): selector.TextSelector()} +) + + +class EheimDigitalConfigFlow(ConfigFlow, domain=DOMAIN): + """The EHEIM Digital config flow.""" + + def __init__(self) -> None: + """Initialize the config flow.""" + super().__init__() + self.data: dict[str, Any] = {} + self.main_device_added_event = asyncio.Event() + + async def async_step_zeroconf( + self, discovery_info: ZeroconfServiceInfo + ) -> ConfigFlowResult: + """Handle zeroconf discovery.""" + self.data[CONF_HOST] = host = discovery_info.host + + 
self._async_abort_entries_match(self.data) + + hub = EheimDigitalHub( + host=host, + session=async_get_clientsession(self.hass), + loop=self.hass.loop, + main_device_added_event=self.main_device_added_event, + ) + try: + await hub.connect() + + async with asyncio.timeout(2): + # This event gets triggered when the first message is received from + # the device, it contains the data necessary to create the main device. + # This removes the race condition where the main device is accessed + # before the response from the device is parsed. + await self.main_device_added_event.wait() + if TYPE_CHECKING: + # At this point the main device is always set + assert isinstance(hub.main, EheimDigitalDevice) + await hub.close() + except (ClientError, TimeoutError): + return self.async_abort(reason="cannot_connect") + except Exception: # noqa: BLE001 + return self.async_abort(reason="unknown") + await self.async_set_unique_id(hub.main.mac_address) + self._abort_if_unique_id_configured(updates={CONF_HOST: host}) + return await self.async_step_discovery_confirm() + + async def async_step_discovery_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm discovery.""" + if user_input is not None: + return self.async_create_entry( + title=self.data[CONF_HOST], + data={CONF_HOST: self.data[CONF_HOST]}, + ) + + self._set_confirm_only() + return self.async_show_form(step_id="discovery_confirm") + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the user step.""" + if user_input is None: + return self.async_show_form(step_id=SOURCE_USER, data_schema=CONFIG_SCHEMA) + + self._async_abort_entries_match(user_input) + errors: dict[str, str] = {} + hub = EheimDigitalHub( + host=user_input[CONF_HOST], + session=async_get_clientsession(self.hass), + loop=self.hass.loop, + main_device_added_event=self.main_device_added_event, + ) + + try: + await hub.connect() + + async with asyncio.timeout(2): 
+ # This event gets triggered when the first message is received from + # the device, it contains the data necessary to create the main device. + # This removes the race condition where the main device is accessed + # before the response from the device is parsed. + await self.main_device_added_event.wait() + if TYPE_CHECKING: + # At this point the main device is always set + assert isinstance(hub.main, EheimDigitalDevice) + await self.async_set_unique_id( + hub.main.mac_address, raise_on_progress=False + ) + await hub.close() + except (ClientError, TimeoutError): + errors["base"] = "cannot_connect" + except Exception: # noqa: BLE001 + errors["base"] = "unknown" + LOGGER.exception("Unknown exception occurred") + else: + self._abort_if_unique_id_configured() + return self.async_create_entry(data=user_input, title=user_input[CONF_HOST]) + return self.async_show_form( + step_id=SOURCE_USER, + data_schema=CONFIG_SCHEMA, + errors=errors, + ) diff --git a/homeassistant/components/eheimdigital/const.py b/homeassistant/components/eheimdigital/const.py new file mode 100644 index 00000000000..5ed9303be40 --- /dev/null +++ b/homeassistant/components/eheimdigital/const.py @@ -0,0 +1,17 @@ +"""Constants for the EHEIM Digital integration.""" + +from logging import Logger, getLogger + +from eheimdigital.types import LightMode + +from homeassistant.components.light import EFFECT_OFF + +LOGGER: Logger = getLogger(__package__) +DOMAIN = "eheimdigital" + +EFFECT_DAYCL_MODE = "daycl_mode" + +EFFECT_TO_LIGHT_MODE = { + EFFECT_DAYCL_MODE: LightMode.DAYCL_MODE, + EFFECT_OFF: LightMode.MAN_MODE, +} diff --git a/homeassistant/components/eheimdigital/coordinator.py b/homeassistant/components/eheimdigital/coordinator.py new file mode 100644 index 00000000000..f122a1227c5 --- /dev/null +++ b/homeassistant/components/eheimdigital/coordinator.py @@ -0,0 +1,78 @@ +"""Data update coordinator for the EHEIM Digital integration.""" + +from __future__ import annotations + +from collections.abc import 
Callable, Coroutine +from typing import Any + +from aiohttp import ClientError +from eheimdigital.device import EheimDigitalDevice +from eheimdigital.hub import EheimDigitalHub +from eheimdigital.types import EheimDeviceType + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.entity_component import DEFAULT_SCAN_INTERVAL +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN, LOGGER + +type AsyncSetupDeviceEntitiesCallback = Callable[[str], Coroutine[Any, Any, None]] + + +class EheimDigitalUpdateCoordinator( + DataUpdateCoordinator[dict[str, EheimDigitalDevice]] +): + """The EHEIM Digital data update coordinator.""" + + config_entry: ConfigEntry + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize the EHEIM Digital data update coordinator.""" + super().__init__( + hass, LOGGER, name=DOMAIN, update_interval=DEFAULT_SCAN_INTERVAL + ) + self.hub = EheimDigitalHub( + host=self.config_entry.data[CONF_HOST], + session=async_get_clientsession(hass), + loop=hass.loop, + receive_callback=self._async_receive_callback, + device_found_callback=self._async_device_found, + ) + self.known_devices: set[str] = set() + self.platform_callbacks: set[AsyncSetupDeviceEntitiesCallback] = set() + + def add_platform_callback( + self, + async_setup_device_entities: AsyncSetupDeviceEntitiesCallback, + ) -> None: + """Add the setup callbacks from a specific platform.""" + self.platform_callbacks.add(async_setup_device_entities) + + async def _async_device_found( + self, device_address: str, device_type: EheimDeviceType + ) -> None: + """Set up a new device found. + + This function is called from the library whenever a new device is added. 
+ """ + + if device_address not in self.known_devices: + for platform_callback in self.platform_callbacks: + await platform_callback(device_address) + + async def _async_receive_callback(self) -> None: + self.async_set_updated_data(self.hub.devices) + + async def _async_setup(self) -> None: + await self.hub.connect() + await self.hub.update() + + async def _async_update_data(self) -> dict[str, EheimDigitalDevice]: + try: + await self.hub.update() + except ClientError as ex: + raise UpdateFailed from ex + return self.data diff --git a/homeassistant/components/eheimdigital/entity.py b/homeassistant/components/eheimdigital/entity.py new file mode 100644 index 00000000000..c0f91a4b798 --- /dev/null +++ b/homeassistant/components/eheimdigital/entity.py @@ -0,0 +1,53 @@ +"""Base entity for EHEIM Digital.""" + +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING + +from eheimdigital.device import EheimDigitalDevice + +from homeassistant.const import CONF_HOST +from homeassistant.core import callback +from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import EheimDigitalUpdateCoordinator + + +class EheimDigitalEntity[_DeviceT: EheimDigitalDevice]( + CoordinatorEntity[EheimDigitalUpdateCoordinator], ABC +): + """Represent a EHEIM Digital entity.""" + + _attr_has_entity_name = True + + def __init__( + self, coordinator: EheimDigitalUpdateCoordinator, device: _DeviceT + ) -> None: + """Initialize a EHEIM Digital entity.""" + super().__init__(coordinator) + if TYPE_CHECKING: + # At this point at least one device is found and so there is always a main device set + assert isinstance(coordinator.hub.main, EheimDigitalDevice) + self._attr_device_info = DeviceInfo( + configuration_url=f"http://{coordinator.config_entry.data[CONF_HOST]}", + name=device.name, + connections={(CONNECTION_NETWORK_MAC, 
device.mac_address)}, + manufacturer="EHEIM", + model=device.device_type.model_name, + identifiers={(DOMAIN, device.mac_address)}, + suggested_area=device.aquarium_name, + sw_version=device.sw_version, + via_device=(DOMAIN, coordinator.hub.main.mac_address), + ) + self._device = device + self._device_address = device.mac_address + + @abstractmethod + def _async_update_attrs(self) -> None: ... + + @callback + def _handle_coordinator_update(self) -> None: + """Update attributes when the coordinator updates.""" + self._async_update_attrs() + super()._handle_coordinator_update() diff --git a/homeassistant/components/eheimdigital/light.py b/homeassistant/components/eheimdigital/light.py new file mode 100644 index 00000000000..a119e0bda8d --- /dev/null +++ b/homeassistant/components/eheimdigital/light.py @@ -0,0 +1,127 @@ +"""EHEIM Digital lights.""" + +from typing import Any + +from eheimdigital.classic_led_ctrl import EheimDigitalClassicLEDControl +from eheimdigital.types import EheimDigitalClientError, LightMode + +from homeassistant.components.light import ( + ATTR_BRIGHTNESS, + ATTR_EFFECT, + EFFECT_OFF, + ColorMode, + LightEntity, + LightEntityFeature, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util.color import brightness_to_value, value_to_brightness + +from . 
import EheimDigitalConfigEntry +from .const import EFFECT_DAYCL_MODE, EFFECT_TO_LIGHT_MODE +from .coordinator import EheimDigitalUpdateCoordinator +from .entity import EheimDigitalEntity + +BRIGHTNESS_SCALE = (1, 100) + +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: EheimDigitalConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the callbacks for the coordinator so lights can be added as devices are found.""" + coordinator = entry.runtime_data + + async def async_setup_device_entities(device_address: str) -> None: + """Set up the light entities for a device.""" + device = coordinator.hub.devices[device_address] + entities: list[EheimDigitalClassicLEDControlLight] = [] + + if isinstance(device, EheimDigitalClassicLEDControl): + for channel in range(2): + if len(device.tankconfig[channel]) > 0: + entities.append( + EheimDigitalClassicLEDControlLight(coordinator, device, channel) + ) + coordinator.known_devices.add(device.mac_address) + async_add_entities(entities) + + coordinator.add_platform_callback(async_setup_device_entities) + + for device_address in entry.runtime_data.hub.devices: + await async_setup_device_entities(device_address) + + +class EheimDigitalClassicLEDControlLight( + EheimDigitalEntity[EheimDigitalClassicLEDControl], LightEntity +): + """Represent a EHEIM Digital classicLEDcontrol light.""" + + _attr_supported_color_modes = {ColorMode.BRIGHTNESS} + _attr_color_mode = ColorMode.BRIGHTNESS + _attr_effect_list = [EFFECT_DAYCL_MODE] + _attr_supported_features = LightEntityFeature.EFFECT + _attr_translation_key = "channel" + + def __init__( + self, + coordinator: EheimDigitalUpdateCoordinator, + device: EheimDigitalClassicLEDControl, + channel: int, + ) -> None: + """Initialize an EHEIM Digital classicLEDcontrol light entity.""" + super().__init__(coordinator, device) + self._channel = channel + self._attr_translation_placeholders = 
{"channel_id": str(channel)} + self._attr_unique_id = f"{self._device_address}_{channel}" + self._async_update_attrs() + + @property + def available(self) -> bool: + """Return whether the entity is available.""" + return super().available and self._device.light_level[self._channel] is not None + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the light.""" + if ATTR_EFFECT in kwargs: + await self._device.set_light_mode(EFFECT_TO_LIGHT_MODE[kwargs[ATTR_EFFECT]]) + return + if ATTR_BRIGHTNESS in kwargs: + if self._device.light_mode == LightMode.DAYCL_MODE: + await self._device.set_light_mode(LightMode.MAN_MODE) + try: + await self._device.turn_on( + int(brightness_to_value(BRIGHTNESS_SCALE, kwargs[ATTR_BRIGHTNESS])), + self._channel, + ) + except EheimDigitalClientError as err: + raise HomeAssistantError from err + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the light.""" + if self._device.light_mode == LightMode.DAYCL_MODE: + await self._device.set_light_mode(LightMode.MAN_MODE) + try: + await self._device.turn_off(self._channel) + except EheimDigitalClientError as err: + raise HomeAssistantError from err + + def _async_update_attrs(self) -> None: + light_level = self._device.light_level[self._channel] + + self._attr_is_on = light_level > 0 if light_level is not None else None + self._attr_brightness = ( + value_to_brightness(BRIGHTNESS_SCALE, light_level) + if light_level is not None + else None + ) + self._attr_effect = ( + EFFECT_DAYCL_MODE + if self._device.light_mode == LightMode.DAYCL_MODE + else EFFECT_OFF + ) diff --git a/homeassistant/components/eheimdigital/manifest.json b/homeassistant/components/eheimdigital/manifest.json new file mode 100644 index 00000000000..159aecd6b6c --- /dev/null +++ b/homeassistant/components/eheimdigital/manifest.json @@ -0,0 +1,15 @@ +{ + "domain": "eheimdigital", + "name": "EHEIM Digital", + "codeowners": ["@autinerd"], + "config_flow": true, + "documentation": 
"https://www.home-assistant.io/integrations/eheimdigital", + "integration_type": "hub", + "iot_class": "local_polling", + "loggers": ["eheimdigital"], + "quality_scale": "bronze", + "requirements": ["eheimdigital==1.0.3"], + "zeroconf": [ + { "type": "_http._tcp.local.", "name": "eheimdigital._http._tcp.local." } + ] +} diff --git a/homeassistant/components/eheimdigital/quality_scale.yaml b/homeassistant/components/eheimdigital/quality_scale.yaml new file mode 100644 index 00000000000..a56551a14f6 --- /dev/null +++ b/homeassistant/components/eheimdigital/quality_scale.yaml @@ -0,0 +1,70 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: No service actions implemented. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: No service actions implemented. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: No service actions implemented. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: This integration doesn't have an options flow. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: + status: exempt + comment: This integration requires no authentication. 
+ test-coverage: done + + # Gold + devices: done + diagnostics: todo + discovery-update-info: done + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: done + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + repair-issues: todo + stale-devices: done + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/eheimdigital/strings.json b/homeassistant/components/eheimdigital/strings.json new file mode 100644 index 00000000000..0e6fa6a0814 --- /dev/null +++ b/homeassistant/components/eheimdigital/strings.json @@ -0,0 +1,39 @@ +{ + "config": { + "step": { + "discovery_confirm": { + "description": "[%key:common::config_flow::description::confirm_setup%]" + }, + "user": { + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The host or IP address of your main device. Only needed to change if 'eheimdigital' doesn't work." 
+ } + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]" + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + } + }, + "entity": { + "light": { + "channel": { + "name": "Channel {channel_id}", + "state_attributes": { + "effect": { + "state": { + "daycl_mode": "Daycycle mode" + } + } + } + } + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 930bda4e81b..3b33d31a2a2 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -155,6 +155,7 @@ FLOWS = { "ecowitt", "edl21", "efergy", + "eheimdigital", "electrasmart", "electric_kiwi", "elevenlabs", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index ecbe3f0dcbf..1530e308e7d 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -1524,6 +1524,12 @@ "config_flow": false, "iot_class": "local_polling" }, + "eheimdigital": { + "name": "EHEIM Digital", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_polling" + }, "electrasmart": { "name": "Electra Smart", "integration_type": "hub", diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index b04e6ad6f52..e5b50841d11 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -524,6 +524,10 @@ ZEROCONF = { "domain": "bosch_shc", "name": "bosch shc*", }, + { + "domain": "eheimdigital", + "name": "eheimdigital._http._tcp.local.", + }, { "domain": "lektrico", "name": "lektrico*", diff --git a/mypy.ini b/mypy.ini index 2d8e0ea3f61..6daf54a8eb7 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1455,6 +1455,16 @@ disallow_untyped_defs = true warn_return_any = 
true warn_unreachable = true +[mypy-homeassistant.components.eheimdigital.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.electrasmart.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index 3fab70ecab3..7eab703836c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -809,6 +809,9 @@ ebusdpy==0.0.17 # homeassistant.components.ecoal_boiler ecoaliface==0.4.0 +# homeassistant.components.eheimdigital +eheimdigital==1.0.3 + # homeassistant.components.electric_kiwi electrickiwi-api==0.8.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 06fd689a0ff..2a785e363f7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -687,6 +687,9 @@ eagle100==0.1.1 # homeassistant.components.easyenergy easyenergy==2.1.2 +# homeassistant.components.eheimdigital +eheimdigital==1.0.3 + # homeassistant.components.electric_kiwi electrickiwi-api==0.8.5 diff --git a/tests/components/eheimdigital/__init__.py b/tests/components/eheimdigital/__init__.py new file mode 100644 index 00000000000..1f608f868de --- /dev/null +++ b/tests/components/eheimdigital/__init__.py @@ -0,0 +1 @@ +"""Tests for the EHEIM Digital integration.""" diff --git a/tests/components/eheimdigital/conftest.py b/tests/components/eheimdigital/conftest.py new file mode 100644 index 00000000000..cdad628de6b --- /dev/null +++ b/tests/components/eheimdigital/conftest.py @@ -0,0 +1,58 @@ +"""Configurations for the EHEIM Digital tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +from eheimdigital.classic_led_ctrl import EheimDigitalClassicLEDControl +from eheimdigital.hub import EheimDigitalHub +from eheimdigital.types import 
EheimDeviceType, LightMode +import pytest + +from homeassistant.components.eheimdigital.const import DOMAIN +from homeassistant.const import CONF_HOST + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "eheimdigital"}, unique_id="00:00:00:00:00:01" + ) + + +@pytest.fixture +def classic_led_ctrl_mock(): + """Mock a classicLEDcontrol device.""" + classic_led_ctrl_mock = MagicMock(spec=EheimDigitalClassicLEDControl) + classic_led_ctrl_mock.tankconfig = [["CLASSIC_DAYLIGHT"], []] + classic_led_ctrl_mock.mac_address = "00:00:00:00:00:01" + classic_led_ctrl_mock.device_type = ( + EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + classic_led_ctrl_mock.name = "Mock classicLEDcontrol+e" + classic_led_ctrl_mock.aquarium_name = "Mock Aquarium" + classic_led_ctrl_mock.light_mode = LightMode.DAYCL_MODE + classic_led_ctrl_mock.light_level = (10, 39) + return classic_led_ctrl_mock + + +@pytest.fixture +def eheimdigital_hub_mock(classic_led_ctrl_mock: MagicMock) -> Generator[AsyncMock]: + """Mock eheimdigital hub.""" + with ( + patch( + "homeassistant.components.eheimdigital.coordinator.EheimDigitalHub", + spec=EheimDigitalHub, + ) as eheimdigital_hub_mock, + patch( + "homeassistant.components.eheimdigital.config_flow.EheimDigitalHub", + new=eheimdigital_hub_mock, + ), + ): + eheimdigital_hub_mock.return_value.devices = { + "00:00:00:00:00:01": classic_led_ctrl_mock + } + eheimdigital_hub_mock.return_value.main = classic_led_ctrl_mock + yield eheimdigital_hub_mock diff --git a/tests/components/eheimdigital/snapshots/test_light.ambr b/tests/components/eheimdigital/snapshots/test_light.ambr new file mode 100644 index 00000000000..8df4745997e --- /dev/null +++ b/tests/components/eheimdigital/snapshots/test_light.ambr @@ -0,0 +1,316 @@ +# serializer version: 1 +# name: 
test_dynamic_new_devices[light.mock_classicledcontrol_e_channel_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'daycl_mode', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_classicledcontrol_e_channel_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Channel 0', + 'platform': 'eheimdigital', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'channel', + 'unique_id': '00:00:00:00:00:01_0', + 'unit_of_measurement': None, + }) +# --- +# name: test_dynamic_new_devices[light.mock_classicledcontrol_e_channel_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 26, + 'color_mode': , + 'effect': 'daycl_mode', + 'effect_list': list([ + 'daycl_mode', + ]), + 'friendly_name': 'Mock classicLEDcontrol+e Channel 0', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.mock_classicledcontrol_e_channel_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig0][light.mock_classicledcontrol_e_channel_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'daycl_mode', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_classicledcontrol_e_channel_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Channel 0', + 'platform': 'eheimdigital', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'channel', + 'unique_id': '00:00:00:00:00:01_0', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig0][light.mock_classicledcontrol_e_channel_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 26, + 'color_mode': , + 'effect': 'daycl_mode', + 'effect_list': list([ + 'daycl_mode', + ]), + 'friendly_name': 'Mock classicLEDcontrol+e Channel 0', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.mock_classicledcontrol_e_channel_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig1][light.mock_classicledcontrol_e_channel_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'daycl_mode', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_classicledcontrol_e_channel_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Channel 1', + 'platform': 'eheimdigital', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'channel', + 'unique_id': '00:00:00:00:00:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig1][light.mock_classicledcontrol_e_channel_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 99, + 
'color_mode': , + 'effect': 'daycl_mode', + 'effect_list': list([ + 'daycl_mode', + ]), + 'friendly_name': 'Mock classicLEDcontrol+e Channel 1', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.mock_classicledcontrol_e_channel_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig2][light.mock_classicledcontrol_e_channel_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'daycl_mode', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_classicledcontrol_e_channel_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Channel 0', + 'platform': 'eheimdigital', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'channel', + 'unique_id': '00:00:00:00:00:01_0', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig2][light.mock_classicledcontrol_e_channel_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 26, + 'color_mode': , + 'effect': 'daycl_mode', + 'effect_list': list([ + 'daycl_mode', + ]), + 'friendly_name': 'Mock classicLEDcontrol+e Channel 0', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.mock_classicledcontrol_e_channel_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig2][light.mock_classicledcontrol_e_channel_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': 
set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'daycl_mode', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.mock_classicledcontrol_e_channel_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Channel 1', + 'platform': 'eheimdigital', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'channel', + 'unique_id': '00:00:00:00:00:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_classic_led_ctrl[tankconfig2][light.mock_classicledcontrol_e_channel_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 99, + 'color_mode': , + 'effect': 'daycl_mode', + 'effect_list': list([ + 'daycl_mode', + ]), + 'friendly_name': 'Mock classicLEDcontrol+e Channel 1', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.mock_classicledcontrol_e_channel_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/eheimdigital/test_config_flow.py b/tests/components/eheimdigital/test_config_flow.py new file mode 100644 index 00000000000..e75cf31eb98 --- /dev/null +++ b/tests/components/eheimdigital/test_config_flow.py @@ -0,0 +1,212 @@ +"""Tests the config flow of EHEIM Digital.""" + +from ipaddress import ip_address +from unittest.mock import AsyncMock, MagicMock, patch + +from aiohttp import ClientConnectionError +import pytest + +from homeassistant.components.eheimdigital.const import DOMAIN +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from 
homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +ZEROCONF_DISCOVERY = ZeroconfServiceInfo( + ip_address=ip_address("192.0.2.1"), + ip_addresses=[ip_address("192.0.2.1")], + hostname="eheimdigital.local.", + name="eheimdigital._http._tcp.local.", + port=80, + type="_http._tcp.local.", + properties={}, +) + +USER_INPUT = {CONF_HOST: "eheimdigital"} + + +@patch("homeassistant.components.eheimdigital.config_flow.asyncio.Event", new=AsyncMock) +async def test_full_flow(hass: HomeAssistant, eheimdigital_hub_mock: AsyncMock) -> None: + """Test full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == USER_INPUT[CONF_HOST] + assert result["data"] == USER_INPUT + assert ( + result["result"].unique_id + == eheimdigital_hub_mock.return_value.main.mac_address + ) + + +@patch("homeassistant.components.eheimdigital.config_flow.asyncio.Event", new=AsyncMock) +@pytest.mark.parametrize( + ("side_effect", "error_value"), + [(ClientConnectionError(), "cannot_connect"), (Exception(), "unknown")], +) +async def test_flow_errors( + hass: HomeAssistant, + eheimdigital_hub_mock: AsyncMock, + side_effect: BaseException, + error_value: str, +) -> None: + """Test flow errors.""" + eheimdigital_hub_mock.return_value.connect.side_effect = side_effect + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error_value} + + eheimdigital_hub_mock.return_value.connect.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == USER_INPUT[CONF_HOST] + assert result["data"] == USER_INPUT + assert ( + result["result"].unique_id + == eheimdigital_hub_mock.return_value.main.mac_address + ) + + +@patch("homeassistant.components.eheimdigital.config_flow.asyncio.Event", new=AsyncMock) +async def test_zeroconf_flow( + hass: HomeAssistant, eheimdigital_hub_mock: AsyncMock +) -> None: + """Test zeroconf flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "discovery_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == ZEROCONF_DISCOVERY.host + assert result["data"] == { + CONF_HOST: ZEROCONF_DISCOVERY.host, + } + assert ( + result["result"].unique_id + == eheimdigital_hub_mock.return_value.main.mac_address + ) + + +@pytest.mark.parametrize( + ("side_effect", "error_value"), + [(ClientConnectionError(), "cannot_connect"), (Exception(), "unknown")], +) +@patch("homeassistant.components.eheimdigital.config_flow.asyncio.Event", new=AsyncMock) +async def test_zeroconf_flow_errors( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + side_effect: BaseException, + error_value: str, +) -> None: + """Test zeroconf flow errors.""" + 
eheimdigital_hub_mock.return_value.connect.side_effect = side_effect + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == error_value + + +@patch("homeassistant.components.eheimdigital.config_flow.asyncio.Event", new=AsyncMock) +async def test_abort(hass: HomeAssistant, eheimdigital_hub_mock: AsyncMock) -> None: + """Test flow abort on matching data or unique_id.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == USER_INPUT[CONF_HOST] + assert result["data"] == USER_INPUT + assert ( + result["result"].unique_id + == eheimdigital_hub_mock.return_value.main.mac_address + ) + + result2 = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "user" + + result2 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + USER_INPUT, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" + + result3 = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "user" + + result2 = await hass.config_entries.flow.async_configure( + result3["flow_id"], + {CONF_HOST: "eheimdigital2"}, + ) + await 
hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" diff --git a/tests/components/eheimdigital/test_init.py b/tests/components/eheimdigital/test_init.py new file mode 100644 index 00000000000..211a8b3b6fd --- /dev/null +++ b/tests/components/eheimdigital/test_init.py @@ -0,0 +1,55 @@ +"""Tests for the init module.""" + +from unittest.mock import MagicMock + +from eheimdigital.types import EheimDeviceType + +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry +from tests.typing import WebSocketGenerator + + +async def test_remove_device( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test removing a device.""" + assert await async_setup_component(hass, "config", {}) + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + mac_address: str = eheimdigital_hub_mock.return_value.main.mac_address + + device_entry = device_registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, mac_address)}, + ) + assert device_entry is not None + + hass_client = await hass_ws_client(hass) + + # Do not allow to delete a connected device + response = await hass_client.remove_device( + device_entry.id, mock_config_entry.entry_id + ) + assert not response["success"] + + eheimdigital_hub_mock.return_value.devices = {} + + # Allow to delete a not connected device + response = await hass_client.remove_device( + 
device_entry.id, mock_config_entry.entry_id + ) + assert response["success"] diff --git a/tests/components/eheimdigital/test_light.py b/tests/components/eheimdigital/test_light.py new file mode 100644 index 00000000000..da224979c43 --- /dev/null +++ b/tests/components/eheimdigital/test_light.py @@ -0,0 +1,249 @@ +"""Tests for the light module.""" + +from datetime import timedelta +from unittest.mock import MagicMock, patch + +from aiohttp import ClientError +from eheimdigital.types import EheimDeviceType, LightMode +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.eheimdigital.const import EFFECT_DAYCL_MODE +from homeassistant.components.light import ( + ATTR_BRIGHTNESS, + ATTR_EFFECT, + DOMAIN as LIGHT_DOMAIN, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_UNAVAILABLE, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.util.color import value_to_brightness + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@pytest.mark.parametrize( + "tankconfig", + [ + [["CLASSIC_DAYLIGHT"], []], + [[], ["CLASSIC_DAYLIGHT"]], + [["CLASSIC_DAYLIGHT"], ["CLASSIC_DAYLIGHT"]], + ], +) +async def test_setup_classic_led_ctrl( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + tankconfig: list[list[str]], + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + classic_led_ctrl_mock: MagicMock, +) -> None: + """Test light platform setup with different channels.""" + mock_config_entry.add_to_hass(hass) + + classic_led_ctrl_mock.tankconfig = tankconfig + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await 
eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_dynamic_new_devices( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + classic_led_ctrl_mock: MagicMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, +) -> None: + """Test light platform setup with at first no devices and dynamically adding a device.""" + mock_config_entry.add_to_hass(hass) + + eheimdigital_hub_mock.return_value.devices = {} + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert ( + len( + entity_registry.entities.get_entries_for_config_entry_id( + mock_config_entry.entry_id + ) + ) + == 0 + ) + + eheimdigital_hub_mock.return_value.devices = { + "00:00:00:00:00:01": classic_led_ctrl_mock + } + + await eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.usefixtures("eheimdigital_hub_mock") +async def test_turn_off( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + classic_led_ctrl_mock: MagicMock, +) -> None: + """Test turning off the light.""" + mock_config_entry.add_to_hass(hass) + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await mock_config_entry.runtime_data._async_device_found( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + await 
hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: "light.mock_classicledcontrol_e_channel_0"}, + blocking=True, + ) + + classic_led_ctrl_mock.set_light_mode.assert_awaited_once_with(LightMode.MAN_MODE) + classic_led_ctrl_mock.turn_off.assert_awaited_once_with(0) + + +@pytest.mark.parametrize( + ("dim_input", "expected_dim_value"), + [ + (3, 1), + (255, 100), + (128, 50), + ], +) +async def test_turn_on_brightness( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + mock_config_entry: MockConfigEntry, + classic_led_ctrl_mock: MagicMock, + dim_input: int, + expected_dim_value: int, +) -> None: + """Test turning on the light with different brightness values.""" + mock_config_entry.add_to_hass(hass) + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "light.mock_classicledcontrol_e_channel_0", + ATTR_BRIGHTNESS: dim_input, + }, + blocking=True, + ) + + classic_led_ctrl_mock.set_light_mode.assert_awaited_once_with(LightMode.MAN_MODE) + classic_led_ctrl_mock.turn_on.assert_awaited_once_with(expected_dim_value, 0) + + +async def test_turn_on_effect( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + mock_config_entry: MockConfigEntry, + classic_led_ctrl_mock: MagicMock, +) -> None: + """Test turning on the light with an effect value.""" + mock_config_entry.add_to_hass(hass) + + classic_led_ctrl_mock.light_mode = LightMode.MAN_MODE + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await 
eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "light.mock_classicledcontrol_e_channel_0", + ATTR_EFFECT: EFFECT_DAYCL_MODE, + }, + blocking=True, + ) + + classic_led_ctrl_mock.set_light_mode.assert_awaited_once_with(LightMode.DAYCL_MODE) + + +async def test_state_update( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + mock_config_entry: MockConfigEntry, + classic_led_ctrl_mock: MagicMock, +) -> None: + """Test the light state update.""" + mock_config_entry.add_to_hass(hass) + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + classic_led_ctrl_mock.light_level = (20, 30) + + await eheimdigital_hub_mock.call_args.kwargs["receive_callback"]() + + assert (state := hass.states.get("light.mock_classicledcontrol_e_channel_0")) + assert state.attributes["brightness"] == value_to_brightness((1, 100), 20) + + +async def test_update_failed( + hass: HomeAssistant, + eheimdigital_hub_mock: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test an failed update.""" + mock_config_entry.add_to_hass(hass) + + with patch("homeassistant.components.eheimdigital.PLATFORMS", [Platform.LIGHT]): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await eheimdigital_hub_mock.call_args.kwargs["device_found_callback"]( + "00:00:00:00:00:01", EheimDeviceType.VERSION_EHEIM_CLASSIC_LED_CTRL_PLUS_E + ) + await hass.async_block_till_done() + + eheimdigital_hub_mock.return_value.update.side_effect = 
ClientError + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + hass.states.get("light.mock_classicledcontrol_e_channel_0").state + == STATE_UNAVAILABLE + ) From 1aabbec3dddaa3cc178a71d2957f478389f57cda Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 13 Dec 2024 16:37:26 -0500 Subject: [PATCH 231/677] Bump yalexs-ble to 2.5.4 (#133172) --- homeassistant/components/august/manifest.json | 2 +- homeassistant/components/yale/manifest.json | 2 +- homeassistant/components/yalexs_ble/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index 99dbbc0ed9c..ed2c8007ee8 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.2"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.4"] } diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json index 474ed36e90c..2ed1f4b5c43 100644 --- a/homeassistant/components/yale/manifest.json +++ b/homeassistant/components/yale/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale", "iot_class": "cloud_push", "loggers": ["socketio", "engineio", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.2"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.4"] } diff --git a/homeassistant/components/yalexs_ble/manifest.json b/homeassistant/components/yalexs_ble/manifest.json index 95d28cd5372..1472f9035ea 100644 --- a/homeassistant/components/yalexs_ble/manifest.json +++ b/homeassistant/components/yalexs_ble/manifest.json @@ 
-12,5 +12,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/yalexs_ble", "iot_class": "local_push", - "requirements": ["yalexs-ble==2.5.2"] + "requirements": ["yalexs-ble==2.5.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index 7eab703836c..4ce1c523171 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3055,7 +3055,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.2 +yalexs-ble==2.5.4 # homeassistant.components.august # homeassistant.components.yale diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2a785e363f7..0f9d94e2272 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2450,7 +2450,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.2 +yalexs-ble==2.5.4 # homeassistant.components.august # homeassistant.components.yale From 165ca5140c408927cdeb14eeab44a20845dddffe Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 13 Dec 2024 21:05:41 -0500 Subject: [PATCH 232/677] Bump uiprotect to 7.0.2 (#132975) --- .../components/unifiprotect/manifest.json | 2 +- .../components/unifiprotect/services.py | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/unifiprotect/conftest.py | 58 ++++++++++--------- .../unifiprotect/test_binary_sensor.py | 20 +++---- tests/components/unifiprotect/test_camera.py | 30 +++++----- tests/components/unifiprotect/test_event.py | 12 ++-- tests/components/unifiprotect/test_init.py | 2 +- tests/components/unifiprotect/test_light.py | 6 +- tests/components/unifiprotect/test_lock.py | 16 ++--- .../unifiprotect/test_media_player.py | 30 ++++++---- .../unifiprotect/test_media_source.py | 8 +-- tests/components/unifiprotect/test_number.py | 12 ++-- .../components/unifiprotect/test_recorder.py | 2 +- tests/components/unifiprotect/test_select.py | 20 +++---- tests/components/unifiprotect/test_sensor.py | 10 ++-- .../components/unifiprotect/test_services.py | 24 +++++--- tests/components/unifiprotect/test_switch.py | 18 +++--- tests/components/unifiprotect/test_text.py | 2 +- 20 files changed, 152 insertions(+), 126 deletions(-) diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json index 9e8a0ea6c21..81ef72ec50d 100644 --- a/homeassistant/components/unifiprotect/manifest.json +++ b/homeassistant/components/unifiprotect/manifest.json @@ -40,7 +40,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["uiprotect", "unifi_discovery"], - "requirements": ["uiprotect==6.8.0", "unifi-discovery==1.2.0"], + "requirements": ["uiprotect==7.0.2", "unifi-discovery==1.2.0"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/homeassistant/components/unifiprotect/services.py b/homeassistant/components/unifiprotect/services.py index fc438240839..35713efdf3d 100644 --- a/homeassistant/components/unifiprotect/services.py +++ 
b/homeassistant/components/unifiprotect/services.py @@ -5,7 +5,7 @@ from __future__ import annotations import asyncio from typing import Any, cast -from pydantic.v1 import ValidationError +from pydantic import ValidationError from uiprotect.api import ProtectApiClient from uiprotect.data import Camera, Chime from uiprotect.exceptions import ClientError diff --git a/requirements_all.txt b/requirements_all.txt index 4ce1c523171..1e271ff1d57 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2905,7 +2905,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.8.0 +uiprotect==7.0.2 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0f9d94e2272..95d610361d9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2324,7 +2324,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.8.0 +uiprotect==7.0.2 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 diff --git a/tests/components/unifiprotect/conftest.py b/tests/components/unifiprotect/conftest.py index fad65c095df..3ed559b71ec 100644 --- a/tests/components/unifiprotect/conftest.py +++ b/tests/components/unifiprotect/conftest.py @@ -51,11 +51,11 @@ def mock_nvr(): nvr = NVR.from_unifi_dict(**data) # disable pydantic validation so mocking can happen - NVR.__config__.validate_assignment = False + NVR.model_config["validate_assignment"] = False yield nvr - NVR.__config__.validate_assignment = True + NVR.model_config["validate_assignment"] = True @pytest.fixture(name="ufp_config_entry") @@ -120,7 +120,11 @@ def mock_ufp_client(bootstrap: Bootstrap): client.base_url = "https://127.0.0.1" client.connection_host = IPv4Address("127.0.0.1") - client.get_nvr = AsyncMock(return_value=nvr) + + async def get_nvr(*args: Any, **kwargs: Any) -> NVR: + return client.bootstrap.nvr + + client.get_nvr = get_nvr 
client.get_bootstrap = AsyncMock(return_value=bootstrap) client.update = AsyncMock(return_value=bootstrap) client.async_disconnect_ws = AsyncMock() @@ -173,7 +177,7 @@ def camera_fixture(fixed_now: datetime): """Mock UniFi Protect Camera device.""" # disable pydantic validation so mocking can happen - Camera.__config__.validate_assignment = False + Camera.model_config["validate_assignment"] = False data = json.loads(load_fixture("sample_camera.json", integration=DOMAIN)) camera = Camera.from_unifi_dict(**data) @@ -181,23 +185,23 @@ def camera_fixture(fixed_now: datetime): yield camera - Camera.__config__.validate_assignment = True + Camera.model_config["validate_assignment"] = True @pytest.fixture(name="camera_all") def camera_all_fixture(camera: Camera): """Mock UniFi Protect Camera device.""" - all_camera = camera.copy() - all_camera.channels = [all_camera.channels[0].copy()] + all_camera = camera.model_copy() + all_camera.channels = [all_camera.channels[0].model_copy()] - medium_channel = all_camera.channels[0].copy() + medium_channel = all_camera.channels[0].model_copy() medium_channel.name = "Medium" medium_channel.id = 1 medium_channel.rtsp_alias = "test_medium_alias" all_camera.channels.append(medium_channel) - low_channel = all_camera.channels[0].copy() + low_channel = all_camera.channels[0].model_copy() low_channel.name = "Low" low_channel.id = 2 low_channel.rtsp_alias = "test_medium_alias" @@ -210,10 +214,10 @@ def camera_all_fixture(camera: Camera): def doorbell_fixture(camera: Camera, fixed_now: datetime): """Mock UniFi Protect Camera device (with chime).""" - doorbell = camera.copy() - doorbell.channels = [c.copy() for c in doorbell.channels] + doorbell = camera.model_copy() + doorbell.channels = [c.model_copy() for c in doorbell.channels] - package_channel = doorbell.channels[0].copy() + package_channel = doorbell.channels[0].model_copy() package_channel.name = "Package Camera" package_channel.id = 3 package_channel.fps = 2 @@ -247,8 +251,8 @@ def 
doorbell_fixture(camera: Camera, fixed_now: datetime): def unadopted_camera(camera: Camera): """Mock UniFi Protect Camera device (unadopted).""" - no_camera = camera.copy() - no_camera.channels = [c.copy() for c in no_camera.channels] + no_camera = camera.model_copy() + no_camera.channels = [c.model_copy() for c in no_camera.channels] no_camera.name = "Unadopted Camera" no_camera.is_adopted = False return no_camera @@ -259,19 +263,19 @@ def light_fixture(): """Mock UniFi Protect Light device.""" # disable pydantic validation so mocking can happen - Light.__config__.validate_assignment = False + Light.model_config["validate_assignment"] = False data = json.loads(load_fixture("sample_light.json", integration=DOMAIN)) yield Light.from_unifi_dict(**data) - Light.__config__.validate_assignment = True + Light.model_config["validate_assignment"] = True @pytest.fixture def unadopted_light(light: Light): """Mock UniFi Protect Light device (unadopted).""" - no_light = light.copy() + no_light = light.model_copy() no_light.name = "Unadopted Light" no_light.is_adopted = False return no_light @@ -282,12 +286,12 @@ def viewer(): """Mock UniFi Protect Viewport device.""" # disable pydantic validation so mocking can happen - Viewer.__config__.validate_assignment = False + Viewer.model_config["validate_assignment"] = False data = json.loads(load_fixture("sample_viewport.json", integration=DOMAIN)) yield Viewer.from_unifi_dict(**data) - Viewer.__config__.validate_assignment = True + Viewer.model_config["validate_assignment"] = True @pytest.fixture(name="sensor") @@ -295,7 +299,7 @@ def sensor_fixture(fixed_now: datetime): """Mock UniFi Protect Sensor device.""" # disable pydantic validation so mocking can happen - Sensor.__config__.validate_assignment = False + Sensor.model_config["validate_assignment"] = False data = json.loads(load_fixture("sample_sensor.json", integration=DOMAIN)) sensor: Sensor = Sensor.from_unifi_dict(**data) @@ -304,14 +308,14 @@ def sensor_fixture(fixed_now: 
datetime): sensor.alarm_triggered_at = fixed_now - timedelta(hours=1) yield sensor - Sensor.__config__.validate_assignment = True + Sensor.model_config["validate_assignment"] = True @pytest.fixture(name="sensor_all") def csensor_all_fixture(sensor: Sensor): """Mock UniFi Protect Sensor device.""" - all_sensor = sensor.copy() + all_sensor = sensor.model_copy() all_sensor.light_settings.is_enabled = True all_sensor.humidity_settings.is_enabled = True all_sensor.temperature_settings.is_enabled = True @@ -327,19 +331,19 @@ def doorlock_fixture(): """Mock UniFi Protect Doorlock device.""" # disable pydantic validation so mocking can happen - Doorlock.__config__.validate_assignment = False + Doorlock.model_config["validate_assignment"] = False data = json.loads(load_fixture("sample_doorlock.json", integration=DOMAIN)) yield Doorlock.from_unifi_dict(**data) - Doorlock.__config__.validate_assignment = True + Doorlock.model_config["validate_assignment"] = True @pytest.fixture def unadopted_doorlock(doorlock: Doorlock): """Mock UniFi Protect Light device (unadopted).""" - no_doorlock = doorlock.copy() + no_doorlock = doorlock.model_copy() no_doorlock.name = "Unadopted Lock" no_doorlock.is_adopted = False return no_doorlock @@ -350,12 +354,12 @@ def chime(): """Mock UniFi Protect Chime device.""" # disable pydantic validation so mocking can happen - Chime.__config__.validate_assignment = False + Chime.model_config["validate_assignment"] = False data = json.loads(load_fixture("sample_chime.json", integration=DOMAIN)) yield Chime.from_unifi_dict(**data) - Chime.__config__.validate_assignment = True + Chime.model_config["validate_assignment"] = True @pytest.fixture(name="fixed_now") diff --git a/tests/components/unifiprotect/test_binary_sensor.py b/tests/components/unifiprotect/test_binary_sensor.py index 31669aa62bb..3a8d5d952ce 100644 --- a/tests/components/unifiprotect/test_binary_sensor.py +++ b/tests/components/unifiprotect/test_binary_sensor.py @@ -305,7 +305,7 @@ async 
def test_binary_sensor_update_motion( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_motion_detected = True new_camera.last_motion_event_id = event.id @@ -352,7 +352,7 @@ async def test_binary_sensor_update_light_motion( api=ufp.api, ) - new_light = light.copy() + new_light = light.model_copy() new_light.is_pir_motion_detected = True new_light.last_motion_event_id = event.id @@ -386,7 +386,7 @@ async def test_binary_sensor_update_mount_type_window( assert state assert state.attributes[ATTR_DEVICE_CLASS] == BinarySensorDeviceClass.DOOR.value - new_sensor = sensor_all.copy() + new_sensor = sensor_all.model_copy() new_sensor.mount_type = MountType.WINDOW mock_msg = Mock() @@ -418,7 +418,7 @@ async def test_binary_sensor_update_mount_type_garage( assert state assert state.attributes[ATTR_DEVICE_CLASS] == BinarySensorDeviceClass.DOOR.value - new_sensor = sensor_all.copy() + new_sensor = sensor_all.model_copy() new_sensor.mount_type = MountType.GARAGE mock_msg = Mock() @@ -468,7 +468,7 @@ async def test_binary_sensor_package_detected( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.PACKAGE] = event.id @@ -501,7 +501,7 @@ async def test_binary_sensor_package_detected( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.PACKAGE] = event.id @@ -534,7 +534,7 @@ async def test_binary_sensor_package_detected( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.PACKAGE] = event.id @@ -611,7 +611,7 @@ async def test_binary_sensor_person_detected( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() 
new_camera.is_smart_detected = True ufp.api.bootstrap.cameras = {new_camera.id: new_camera} @@ -641,7 +641,7 @@ async def test_binary_sensor_person_detected( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.PERSON] = event.id @@ -680,7 +680,7 @@ async def test_binary_sensor_person_detected( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.PERSON] = event.id diff --git a/tests/components/unifiprotect/test_camera.py b/tests/components/unifiprotect/test_camera.py index 689352d8aa3..12b92beedd0 100644 --- a/tests/components/unifiprotect/test_camera.py +++ b/tests/components/unifiprotect/test_camera.py @@ -236,15 +236,15 @@ async def test_basic_setup( ) -> None: """Test working setup of unifiprotect entry.""" - camera_high_only = camera_all.copy() - camera_high_only.channels = [c.copy() for c in camera_all.channels] + camera_high_only = camera_all.model_copy() + camera_high_only.channels = [c.model_copy() for c in camera_all.channels] camera_high_only.name = "Test Camera 1" camera_high_only.channels[0].is_rtsp_enabled = True camera_high_only.channels[1].is_rtsp_enabled = False camera_high_only.channels[2].is_rtsp_enabled = False - camera_medium_only = camera_all.copy() - camera_medium_only.channels = [c.copy() for c in camera_all.channels] + camera_medium_only = camera_all.model_copy() + camera_medium_only.channels = [c.model_copy() for c in camera_all.channels] camera_medium_only.name = "Test Camera 2" camera_medium_only.channels[0].is_rtsp_enabled = False camera_medium_only.channels[1].is_rtsp_enabled = True @@ -252,8 +252,8 @@ async def test_basic_setup( camera_all.name = "Test Camera 3" - camera_no_channels = camera_all.copy() - camera_no_channels.channels = [c.copy() for c in camera_all.channels] + 
camera_no_channels = camera_all.model_copy() + camera_no_channels.channels = [c.model_copy() for c in camera_all.channels] camera_no_channels.name = "Test Camera 4" camera_no_channels.channels[0].is_rtsp_enabled = False camera_no_channels.channels[1].is_rtsp_enabled = False @@ -337,8 +337,8 @@ async def test_webrtc_support( camera_all: ProtectCamera, ) -> None: """Test webrtc support is available.""" - camera_high_only = camera_all.copy() - camera_high_only.channels = [c.copy() for c in camera_all.channels] + camera_high_only = camera_all.model_copy() + camera_high_only.channels = [c.model_copy() for c in camera_all.channels] camera_high_only.name = "Test Camera 1" camera_high_only.channels[0].is_rtsp_enabled = True camera_high_only.channels[1].is_rtsp_enabled = False @@ -355,7 +355,7 @@ async def test_adopt( ) -> None: """Test setting up camera with no camera channels.""" - camera1 = camera.copy() + camera1 = camera.model_copy() camera1.channels = [] await init_entry(hass, ufp, [camera1]) @@ -450,7 +450,7 @@ async def test_camera_interval_update( state = hass.states.get(entity_id) assert state and state.state == "idle" - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.is_recording = True ufp.api.bootstrap.cameras = {new_camera.id: new_camera} @@ -527,10 +527,10 @@ async def test_camera_ws_update( state = hass.states.get(entity_id) assert state and state.state == "idle" - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.is_recording = True - no_camera = camera.copy() + no_camera = camera.model_copy() no_camera.is_adopted = False ufp.api.bootstrap.cameras = {new_camera.id: new_camera} @@ -563,7 +563,7 @@ async def test_camera_ws_update_offline( assert state and state.state == "idle" # camera goes offline - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.state = StateType.DISCONNECTED mock_msg = Mock() @@ -601,7 +601,7 @@ async def test_camera_enable_motion( assert_entity_counts(hass, 
Platform.CAMERA, 2, 1) entity_id = "camera.test_camera_high_resolution_channel" - camera.__fields__["set_motion_detection"] = Mock(final=False) + camera.__pydantic_fields__["set_motion_detection"] = Mock(final=False, frozen=False) camera.set_motion_detection = AsyncMock() await hass.services.async_call( @@ -623,7 +623,7 @@ async def test_camera_disable_motion( assert_entity_counts(hass, Platform.CAMERA, 2, 1) entity_id = "camera.test_camera_high_resolution_channel" - camera.__fields__["set_motion_detection"] = Mock(final=False) + camera.__pydantic_fields__["set_motion_detection"] = Mock(final=False, frozen=False) camera.set_motion_detection = AsyncMock() await hass.services.async_call( diff --git a/tests/components/unifiprotect/test_event.py b/tests/components/unifiprotect/test_event.py index cc2195c1dba..6a26738f5e8 100644 --- a/tests/components/unifiprotect/test_event.py +++ b/tests/components/unifiprotect/test_event.py @@ -75,7 +75,7 @@ async def test_doorbell_ring( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.last_ring_event_id = "test_event_id" ufp.api.bootstrap.cameras = {new_camera.id: new_camera} ufp.api.bootstrap.events = {event.id: event} @@ -107,7 +107,7 @@ async def test_doorbell_ring( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() ufp.api.bootstrap.cameras = {new_camera.id: new_camera} ufp.api.bootstrap.events = {event.id: event} @@ -137,7 +137,7 @@ async def test_doorbell_ring( api=ufp.api, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() ufp.api.bootstrap.cameras = {new_camera.id: new_camera} ufp.api.bootstrap.events = {event.id: event} @@ -190,7 +190,7 @@ async def test_doorbell_nfc_scanned( metadata={"nfc": {"nfc_id": "test_nfc_id", "user_id": "test_user_id"}}, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.last_nfc_card_scanned_event_id = "test_event_id" ufp.api.bootstrap.cameras = {new_camera.id: new_camera} 
ufp.api.bootstrap.events = {event.id: event} @@ -248,7 +248,7 @@ async def test_doorbell_fingerprint_identified( metadata={"fingerprint": {"ulp_id": "test_ulp_id"}}, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.last_fingerprint_identified_event_id = "test_event_id" ufp.api.bootstrap.cameras = {new_camera.id: new_camera} ufp.api.bootstrap.events = {event.id: event} @@ -306,7 +306,7 @@ async def test_doorbell_fingerprint_not_identified( metadata={"fingerprint": {}}, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.last_fingerprint_identified_event_id = "test_event_id" ufp.api.bootstrap.cameras = {new_camera.id: new_camera} ufp.api.bootstrap.events = {event.id: event} diff --git a/tests/components/unifiprotect/test_init.py b/tests/components/unifiprotect/test_init.py index 0d88754a110..b01c7e0cf4a 100644 --- a/tests/components/unifiprotect/test_init.py +++ b/tests/components/unifiprotect/test_init.py @@ -118,7 +118,7 @@ async def test_setup_too_old( ) -> None: """Test setup of unifiprotect entry with too old of version of UniFi Protect.""" - old_bootstrap = ufp.api.bootstrap.copy() + old_bootstrap = ufp.api.bootstrap.model_copy() old_bootstrap.nvr = old_nvr ufp.api.update.return_value = old_bootstrap ufp.api.bootstrap = old_bootstrap diff --git a/tests/components/unifiprotect/test_light.py b/tests/components/unifiprotect/test_light.py index bb0b6992e4e..724ed108673 100644 --- a/tests/components/unifiprotect/test_light.py +++ b/tests/components/unifiprotect/test_light.py @@ -74,7 +74,7 @@ async def test_light_update( await init_entry(hass, ufp, [light, unadopted_light]) assert_entity_counts(hass, Platform.LIGHT, 1, 1) - new_light = light.copy() + new_light = light.model_copy() new_light.is_light_on = True new_light.light_device_settings.led_level = LEDLevel(3) @@ -101,7 +101,7 @@ async def test_light_turn_on( assert_entity_counts(hass, Platform.LIGHT, 1, 1) entity_id = "light.test_light" - 
light.__fields__["set_light"] = Mock(final=False) + light.__pydantic_fields__["set_light"] = Mock(final=False, frozen=False) light.set_light = AsyncMock() await hass.services.async_call( @@ -123,7 +123,7 @@ async def test_light_turn_off( assert_entity_counts(hass, Platform.LIGHT, 1, 1) entity_id = "light.test_light" - light.__fields__["set_light"] = Mock(final=False) + light.__pydantic_fields__["set_light"] = Mock(final=False, frozen=False) light.set_light = AsyncMock() await hass.services.async_call( diff --git a/tests/components/unifiprotect/test_lock.py b/tests/components/unifiprotect/test_lock.py index 8b37b1c5928..9095c092ea2 100644 --- a/tests/components/unifiprotect/test_lock.py +++ b/tests/components/unifiprotect/test_lock.py @@ -75,7 +75,7 @@ async def test_lock_locked( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - new_lock = doorlock.copy() + new_lock = doorlock.model_copy() new_lock.lock_status = LockStatusType.CLOSED mock_msg = Mock() @@ -102,7 +102,7 @@ async def test_lock_unlocking( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - new_lock = doorlock.copy() + new_lock = doorlock.model_copy() new_lock.lock_status = LockStatusType.OPENING mock_msg = Mock() @@ -129,7 +129,7 @@ async def test_lock_locking( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - new_lock = doorlock.copy() + new_lock = doorlock.model_copy() new_lock.lock_status = LockStatusType.CLOSING mock_msg = Mock() @@ -156,7 +156,7 @@ async def test_lock_jammed( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - new_lock = doorlock.copy() + new_lock = doorlock.model_copy() new_lock.lock_status = LockStatusType.JAMMED_WHILE_CLOSING mock_msg = Mock() @@ -183,7 +183,7 @@ async def test_lock_unavailable( await init_entry(hass, ufp, [doorlock, 
unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - new_lock = doorlock.copy() + new_lock = doorlock.model_copy() new_lock.lock_status = LockStatusType.NOT_CALIBRATED mock_msg = Mock() @@ -210,7 +210,7 @@ async def test_lock_do_lock( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - doorlock.__fields__["close_lock"] = Mock(final=False) + doorlock.__pydantic_fields__["close_lock"] = Mock(final=False, frozen=False) doorlock.close_lock = AsyncMock() await hass.services.async_call( @@ -234,7 +234,7 @@ async def test_lock_do_unlock( await init_entry(hass, ufp, [doorlock, unadopted_doorlock]) assert_entity_counts(hass, Platform.LOCK, 1, 1) - new_lock = doorlock.copy() + new_lock = doorlock.model_copy() new_lock.lock_status = LockStatusType.CLOSED mock_msg = Mock() @@ -245,7 +245,7 @@ async def test_lock_do_unlock( ufp.ws_msg(mock_msg) await hass.async_block_till_done() - new_lock.__fields__["open_lock"] = Mock(final=False) + doorlock.__pydantic_fields__["open_lock"] = Mock(final=False, frozen=False) new_lock.open_lock = AsyncMock() await hass.services.async_call( diff --git a/tests/components/unifiprotect/test_media_player.py b/tests/components/unifiprotect/test_media_player.py index 642a3a1e372..6d27eb2a206 100644 --- a/tests/components/unifiprotect/test_media_player.py +++ b/tests/components/unifiprotect/test_media_player.py @@ -88,7 +88,7 @@ async def test_media_player_update( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.talkback_stream = Mock() new_camera.talkback_stream.is_running = True @@ -116,7 +116,7 @@ async def test_media_player_set_volume( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__fields__["set_speaker_volume"] = Mock(final=False) + 
doorbell.__pydantic_fields__["set_speaker_volume"] = Mock(final=False, frozen=False) doorbell.set_speaker_volume = AsyncMock() await hass.services.async_call( @@ -140,7 +140,7 @@ async def test_media_player_stop( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.talkback_stream = AsyncMock() new_camera.talkback_stream.is_running = True @@ -173,9 +173,11 @@ async def test_media_player_play( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__fields__["stop_audio"] = Mock(final=False) - doorbell.__fields__["play_audio"] = Mock(final=False) - doorbell.__fields__["wait_until_audio_completes"] = Mock(final=False) + doorbell.__pydantic_fields__["stop_audio"] = Mock(final=False, frozen=False) + doorbell.__pydantic_fields__["play_audio"] = Mock(final=False, frozen=False) + doorbell.__pydantic_fields__["wait_until_audio_completes"] = Mock( + final=False, frozen=False + ) doorbell.stop_audio = AsyncMock() doorbell.play_audio = AsyncMock() doorbell.wait_until_audio_completes = AsyncMock() @@ -208,9 +210,11 @@ async def test_media_player_play_media_source( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__fields__["stop_audio"] = Mock(final=False) - doorbell.__fields__["play_audio"] = Mock(final=False) - doorbell.__fields__["wait_until_audio_completes"] = Mock(final=False) + doorbell.__pydantic_fields__["stop_audio"] = Mock(final=False, frozen=False) + doorbell.__pydantic_fields__["play_audio"] = Mock(final=False, frozen=False) + doorbell.__pydantic_fields__["wait_until_audio_completes"] = Mock( + final=False, frozen=False + ) doorbell.stop_audio = AsyncMock() doorbell.play_audio = AsyncMock() doorbell.wait_until_audio_completes = AsyncMock() @@ -247,7 +251,7 @@ async def 
test_media_player_play_invalid( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__fields__["play_audio"] = Mock(final=False) + doorbell.__pydantic_fields__["play_audio"] = Mock(final=False, frozen=False) doorbell.play_audio = AsyncMock() with pytest.raises(HomeAssistantError): @@ -276,8 +280,10 @@ async def test_media_player_play_error( await init_entry(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.MEDIA_PLAYER, 1, 1) - doorbell.__fields__["play_audio"] = Mock(final=False) - doorbell.__fields__["wait_until_audio_completes"] = Mock(final=False) + doorbell.__pydantic_fields__["play_audio"] = Mock(final=False, frozen=False) + doorbell.__pydantic_fields__["wait_until_audio_completes"] = Mock( + final=False, frozen=False + ) doorbell.play_audio = AsyncMock(side_effect=StreamError) doorbell.wait_until_audio_completes = AsyncMock() diff --git a/tests/components/unifiprotect/test_media_source.py b/tests/components/unifiprotect/test_media_source.py index 18944460ca5..61f9680bdbc 100644 --- a/tests/components/unifiprotect/test_media_source.py +++ b/tests/components/unifiprotect/test_media_source.py @@ -204,9 +204,9 @@ async def test_browse_media_root_multiple_consoles( await hass.config_entries.async_setup(ufp.entry.entry_id) await hass.async_block_till_done() - bootstrap2 = bootstrap.copy() + bootstrap2 = bootstrap.model_copy() bootstrap2._has_media = True - bootstrap2.nvr = bootstrap.nvr.copy() + bootstrap2.nvr = bootstrap.nvr.model_copy() bootstrap2.nvr.id = "test_id2" bootstrap2.nvr.mac = "A2E00C826924" bootstrap2.nvr.name = "UnifiProtect2" @@ -270,9 +270,9 @@ async def test_browse_media_root_multiple_consoles_only_one_media( await hass.config_entries.async_setup(ufp.entry.entry_id) await hass.async_block_till_done() - bootstrap2 = bootstrap.copy() + bootstrap2 = bootstrap.model_copy() bootstrap2._has_media = False - bootstrap2.nvr = bootstrap.nvr.copy() + 
bootstrap2.nvr = bootstrap.nvr.model_copy() bootstrap2.nvr.id = "test_id2" bootstrap2.nvr.mac = "A2E00C826924" bootstrap2.nvr.name = "UnifiProtect2" diff --git a/tests/components/unifiprotect/test_number.py b/tests/components/unifiprotect/test_number.py index 77a409551b1..1838a574bc4 100644 --- a/tests/components/unifiprotect/test_number.py +++ b/tests/components/unifiprotect/test_number.py @@ -162,7 +162,7 @@ async def test_number_light_sensitivity( description = LIGHT_NUMBERS[0] assert description.ufp_set_method is not None - light.__fields__["set_sensitivity"] = Mock(final=False) + light.__pydantic_fields__["set_sensitivity"] = Mock(final=False, frozen=False) light.set_sensitivity = AsyncMock() _, entity_id = ids_from_device_description(Platform.NUMBER, light, description) @@ -184,7 +184,7 @@ async def test_number_light_duration( description = LIGHT_NUMBERS[1] - light.__fields__["set_duration"] = Mock(final=False) + light.__pydantic_fields__["set_duration"] = Mock(final=False, frozen=False) light.set_duration = AsyncMock() _, entity_id = ids_from_device_description(Platform.NUMBER, light, description) @@ -210,7 +210,9 @@ async def test_number_camera_simple( assert description.ufp_set_method is not None - camera.__fields__[description.ufp_set_method] = Mock(final=False) + camera.__pydantic_fields__[description.ufp_set_method] = Mock( + final=False, frozen=False + ) setattr(camera, description.ufp_set_method, AsyncMock()) _, entity_id = ids_from_device_description(Platform.NUMBER, camera, description) @@ -230,7 +232,9 @@ async def test_number_lock_auto_close( description = DOORLOCK_NUMBERS[0] - doorlock.__fields__["set_auto_close_time"] = Mock(final=False) + doorlock.__pydantic_fields__["set_auto_close_time"] = Mock( + final=False, frozen=False + ) doorlock.set_auto_close_time = AsyncMock() _, entity_id = ids_from_device_description(Platform.NUMBER, doorlock, description) diff --git a/tests/components/unifiprotect/test_recorder.py 
b/tests/components/unifiprotect/test_recorder.py index fe102c2fdbc..1f025a63306 100644 --- a/tests/components/unifiprotect/test_recorder.py +++ b/tests/components/unifiprotect/test_recorder.py @@ -51,7 +51,7 @@ async def test_exclude_attributes( camera_id=doorbell.id, ) - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.is_motion_detected = True new_camera.last_motion_event_id = event.id diff --git a/tests/components/unifiprotect/test_select.py b/tests/components/unifiprotect/test_select.py index 8795af57214..6db3ae22dcb 100644 --- a/tests/components/unifiprotect/test_select.py +++ b/tests/components/unifiprotect/test_select.py @@ -262,7 +262,7 @@ async def test_select_update_doorbell_settings( expected_length += 1 new_nvr = copy(ufp.api.bootstrap.nvr) - new_nvr.__fields__["update_all_messages"] = Mock(final=False) + new_nvr.__pydantic_fields__["update_all_messages"] = Mock(final=False, frozen=False) new_nvr.update_all_messages = Mock() new_nvr.doorbell_settings.all_messages = [ @@ -304,7 +304,7 @@ async def test_select_update_doorbell_message( assert state assert state.state == "Default Message (Welcome)" - new_camera = doorbell.copy() + new_camera = doorbell.model_copy() new_camera.lcd_message = LCDMessage( type=DoorbellMessageType.CUSTOM_MESSAGE, text="Test" ) @@ -332,7 +332,7 @@ async def test_select_set_option_light_motion( _, entity_id = ids_from_device_description(Platform.SELECT, light, LIGHT_SELECTS[0]) - light.__fields__["set_light_settings"] = Mock(final=False) + light.__pydantic_fields__["set_light_settings"] = Mock(final=False, frozen=False) light.set_light_settings = AsyncMock() await hass.services.async_call( @@ -357,7 +357,7 @@ async def test_select_set_option_light_camera( _, entity_id = ids_from_device_description(Platform.SELECT, light, LIGHT_SELECTS[1]) - light.__fields__["set_paired_camera"] = Mock(final=False) + light.__pydantic_fields__["set_paired_camera"] = Mock(final=False, frozen=False) light.set_paired_camera 
= AsyncMock() camera = list(light.api.bootstrap.cameras.values())[0] @@ -393,7 +393,7 @@ async def test_select_set_option_camera_recording( Platform.SELECT, doorbell, CAMERA_SELECTS[0] ) - doorbell.__fields__["set_recording_mode"] = Mock(final=False) + doorbell.__pydantic_fields__["set_recording_mode"] = Mock(final=False, frozen=False) doorbell.set_recording_mode = AsyncMock() await hass.services.async_call( @@ -418,7 +418,7 @@ async def test_select_set_option_camera_ir( Platform.SELECT, doorbell, CAMERA_SELECTS[1] ) - doorbell.__fields__["set_ir_led_model"] = Mock(final=False) + doorbell.__pydantic_fields__["set_ir_led_model"] = Mock(final=False, frozen=False) doorbell.set_ir_led_model = AsyncMock() await hass.services.async_call( @@ -443,7 +443,7 @@ async def test_select_set_option_camera_doorbell_custom( Platform.SELECT, doorbell, CAMERA_SELECTS[2] ) - doorbell.__fields__["set_lcd_text"] = Mock(final=False) + doorbell.__pydantic_fields__["set_lcd_text"] = Mock(final=False, frozen=False) doorbell.set_lcd_text = AsyncMock() await hass.services.async_call( @@ -470,7 +470,7 @@ async def test_select_set_option_camera_doorbell_unifi( Platform.SELECT, doorbell, CAMERA_SELECTS[2] ) - doorbell.__fields__["set_lcd_text"] = Mock(final=False) + doorbell.__pydantic_fields__["set_lcd_text"] = Mock(final=False, frozen=False) doorbell.set_lcd_text = AsyncMock() await hass.services.async_call( @@ -512,7 +512,7 @@ async def test_select_set_option_camera_doorbell_default( Platform.SELECT, doorbell, CAMERA_SELECTS[2] ) - doorbell.__fields__["set_lcd_text"] = Mock(final=False) + doorbell.__pydantic_fields__["set_lcd_text"] = Mock(final=False, frozen=False) doorbell.set_lcd_text = AsyncMock() await hass.services.async_call( @@ -541,7 +541,7 @@ async def test_select_set_option_viewer( Platform.SELECT, viewer, VIEWER_SELECTS[0] ) - viewer.__fields__["set_liveview"] = Mock(final=False) + viewer.__pydantic_fields__["set_liveview"] = Mock(final=False, frozen=False) viewer.set_liveview = 
AsyncMock() liveview = list(viewer.api.bootstrap.liveviews.values())[0] diff --git a/tests/components/unifiprotect/test_sensor.py b/tests/components/unifiprotect/test_sensor.py index bc5f372c598..9489a49bf22 100644 --- a/tests/components/unifiprotect/test_sensor.py +++ b/tests/components/unifiprotect/test_sensor.py @@ -464,7 +464,7 @@ async def test_sensor_update_alarm( api=ufp.api, ) - new_sensor = sensor_all.copy() + new_sensor = sensor_all.model_copy() new_sensor.set_alarm_timeout() new_sensor.last_alarm_event_id = event.id @@ -548,7 +548,7 @@ async def test_camera_update_license_plate( api=ufp.api, ) - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.LICENSE_PLATE] = ( event.id @@ -663,7 +663,7 @@ async def test_camera_update_license_plate_changes_number_during_detect( api=ufp.api, ) - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.LICENSE_PLATE] = ( event.id @@ -750,7 +750,7 @@ async def test_camera_update_license_plate_multiple_updates( api=ufp.api, ) - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.LICENSE_PLATE] = ( event.id @@ -873,7 +873,7 @@ async def test_camera_update_license_no_dupes( api=ufp.api, ) - new_camera = camera.copy() + new_camera = camera.model_copy() new_camera.is_smart_detected = True new_camera.last_smart_detect_event_ids[SmartDetectObjectType.LICENSE_PLATE] = ( event.id diff --git a/tests/components/unifiprotect/test_services.py b/tests/components/unifiprotect/test_services.py index 6808bacb40c..84e0e74a492 100644 --- a/tests/components/unifiprotect/test_services.py +++ b/tests/components/unifiprotect/test_services.py @@ -56,7 +56,9 @@ async def test_global_service_bad_device( """Test global service, invalid 
device ID.""" nvr = ufp.api.bootstrap.nvr - nvr.__fields__["add_custom_doorbell_message"] = Mock(final=False) + nvr.__pydantic_fields__["add_custom_doorbell_message"] = Mock( + final=False, frozen=False + ) nvr.add_custom_doorbell_message = AsyncMock() with pytest.raises(HomeAssistantError): @@ -75,7 +77,9 @@ async def test_global_service_exception( """Test global service, unexpected error.""" nvr = ufp.api.bootstrap.nvr - nvr.__fields__["add_custom_doorbell_message"] = Mock(final=False) + nvr.__pydantic_fields__["add_custom_doorbell_message"] = Mock( + final=False, frozen=False + ) nvr.add_custom_doorbell_message = AsyncMock(side_effect=BadRequest) with pytest.raises(HomeAssistantError): @@ -94,7 +98,9 @@ async def test_add_doorbell_text( """Test add_doorbell_text service.""" nvr = ufp.api.bootstrap.nvr - nvr.__fields__["add_custom_doorbell_message"] = Mock(final=False) + nvr.__pydantic_fields__["add_custom_doorbell_message"] = Mock( + final=False, frozen=False + ) nvr.add_custom_doorbell_message = AsyncMock() await hass.services.async_call( @@ -112,7 +118,9 @@ async def test_remove_doorbell_text( """Test remove_doorbell_text service.""" nvr = ufp.api.bootstrap.nvr - nvr.__fields__["remove_custom_doorbell_message"] = Mock(final=False) + nvr.__pydantic_fields__["remove_custom_doorbell_message"] = Mock( + final=False, frozen=False + ) nvr.remove_custom_doorbell_message = AsyncMock() await hass.services.async_call( @@ -129,7 +137,9 @@ async def test_add_doorbell_text_disabled_config_entry( ) -> None: """Test add_doorbell_text service.""" nvr = ufp.api.bootstrap.nvr - nvr.__fields__["add_custom_doorbell_message"] = Mock(final=False) + nvr.__pydantic_fields__["add_custom_doorbell_message"] = Mock( + final=False, frozen=False + ) nvr.add_custom_doorbell_message = AsyncMock() await hass.config_entries.async_set_disabled_by( @@ -158,10 +168,10 @@ async def test_set_chime_paired_doorbells( ufp.api.update_device = AsyncMock() - camera1 = doorbell.copy() + camera1 = 
doorbell.model_copy() camera1.name = "Test Camera 1" - camera2 = doorbell.copy() + camera2 = doorbell.model_copy() camera2.name = "Test Camera 2" await init_entry(hass, ufp, [camera1, camera2, chime]) diff --git a/tests/components/unifiprotect/test_switch.py b/tests/components/unifiprotect/test_switch.py index 9e0e9efa0ce..194e46681ce 100644 --- a/tests/components/unifiprotect/test_switch.py +++ b/tests/components/unifiprotect/test_switch.py @@ -89,7 +89,7 @@ async def test_switch_nvr(hass: HomeAssistant, ufp: MockUFPFixture) -> None: assert_entity_counts(hass, Platform.SWITCH, 2, 2) nvr = ufp.api.bootstrap.nvr - nvr.__fields__["set_insights"] = Mock(final=False) + nvr.__pydantic_fields__["set_insights"] = Mock(final=False, frozen=False) nvr.set_insights = AsyncMock() entity_id = "switch.unifiprotect_insights_enabled" @@ -272,7 +272,7 @@ async def test_switch_light_status( description = LIGHT_SWITCHES[1] - light.__fields__["set_status_light"] = Mock(final=False) + light.__pydantic_fields__["set_status_light"] = Mock(final=False, frozen=False) light.set_status_light = AsyncMock() _, entity_id = ids_from_device_description(Platform.SWITCH, light, description) @@ -300,7 +300,7 @@ async def test_switch_camera_ssh( description = CAMERA_SWITCHES[0] - doorbell.__fields__["set_ssh"] = Mock(final=False) + doorbell.__pydantic_fields__["set_ssh"] = Mock(final=False, frozen=False) doorbell.set_ssh = AsyncMock() _, entity_id = ids_from_device_description(Platform.SWITCH, doorbell, description) @@ -333,7 +333,9 @@ async def test_switch_camera_simple( assert description.ufp_set_method is not None - doorbell.__fields__[description.ufp_set_method] = Mock(final=False) + doorbell.__pydantic_fields__[description.ufp_set_method] = Mock( + final=False, frozen=False + ) setattr(doorbell, description.ufp_set_method, AsyncMock()) set_method = getattr(doorbell, description.ufp_set_method) @@ -362,7 +364,7 @@ async def test_switch_camera_highfps( description = CAMERA_SWITCHES[3] - 
doorbell.__fields__["set_video_mode"] = Mock(final=False) + doorbell.__pydantic_fields__["set_video_mode"] = Mock(final=False, frozen=False) doorbell.set_video_mode = AsyncMock() _, entity_id = ids_from_device_description(Platform.SWITCH, doorbell, description) @@ -393,7 +395,7 @@ async def test_switch_camera_privacy( description = PRIVACY_MODE_SWITCH - doorbell.__fields__["set_privacy"] = Mock(final=False) + doorbell.__pydantic_fields__["set_privacy"] = Mock(final=False, frozen=False) doorbell.set_privacy = AsyncMock() _, entity_id = ids_from_device_description(Platform.SWITCH, doorbell, description) @@ -409,7 +411,7 @@ async def test_switch_camera_privacy( doorbell.set_privacy.assert_called_with(True, 0, RecordingMode.NEVER) - new_doorbell = doorbell.copy() + new_doorbell = doorbell.model_copy() new_doorbell.add_privacy_zone() new_doorbell.mic_volume = 0 new_doorbell.recording_settings.mode = RecordingMode.NEVER @@ -445,7 +447,7 @@ async def test_switch_camera_privacy_already_on( description = PRIVACY_MODE_SWITCH - doorbell.__fields__["set_privacy"] = Mock(final=False) + doorbell.__pydantic_fields__["set_privacy"] = Mock(final=False, frozen=False) doorbell.set_privacy = AsyncMock() _, entity_id = ids_from_device_description(Platform.SWITCH, doorbell, description) diff --git a/tests/components/unifiprotect/test_text.py b/tests/components/unifiprotect/test_text.py index 3ca11744abb..c34611c43a9 100644 --- a/tests/components/unifiprotect/test_text.py +++ b/tests/components/unifiprotect/test_text.py @@ -78,7 +78,7 @@ async def test_text_camera_set( Platform.TEXT, doorbell, description ) - doorbell.__fields__["set_lcd_text"] = Mock(final=False) + doorbell.__pydantic_fields__["set_lcd_text"] = Mock(final=False, frozen=False) doorbell.set_lcd_text = AsyncMock() await hass.services.async_call( From bce6127264370f67ff99e7fad3a0bb13227349d9 Mon Sep 17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Sat, 14 Dec 2024 03:36:15 -0500 Subject: 
[PATCH 233/677] Bump `nice-go` to 1.0.0 (#133185) * Bump Nice G.O. to 1.0.0 * Mypy * Pytest --- homeassistant/components/nice_go/coordinator.py | 1 - homeassistant/components/nice_go/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/nice_go/fixtures/get_all_barriers.json | 4 ---- tests/components/nice_go/test_init.py | 1 - 6 files changed, 3 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/nice_go/coordinator.py b/homeassistant/components/nice_go/coordinator.py index 29c0d8233fe..07b20bbbf10 100644 --- a/homeassistant/components/nice_go/coordinator.py +++ b/homeassistant/components/nice_go/coordinator.py @@ -239,7 +239,6 @@ class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): ].type, # Device type is not sent in device state update, and it can't change, so we just reuse the existing one BarrierState( deviceId=raw_data["deviceId"], - desired=json.loads(raw_data["desired"]), reported=json.loads(raw_data["reported"]), connectionState=ConnectionState( connected=raw_data["connectionState"]["connected"], diff --git a/homeassistant/components/nice_go/manifest.json b/homeassistant/components/nice_go/manifest.json index 817d7ef9bc9..1af23ec4d9b 100644 --- a/homeassistant/components/nice_go/manifest.json +++ b/homeassistant/components/nice_go/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["nice_go"], - "requirements": ["nice-go==0.3.10"] + "requirements": ["nice-go==1.0.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 1e271ff1d57..3994f0f3029 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1468,7 +1468,7 @@ nextdns==4.0.0 nibe==2.14.0 # homeassistant.components.nice_go -nice-go==0.3.10 +nice-go==1.0.0 # homeassistant.components.niko_home_control niko-home-control==0.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 95d610361d9..f3309cf24ea 100644 --- 
a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1231,7 +1231,7 @@ nextdns==4.0.0 nibe==2.14.0 # homeassistant.components.nice_go -nice-go==0.3.10 +nice-go==1.0.0 # homeassistant.components.niko_home_control niko-home-control==0.2.1 diff --git a/tests/components/nice_go/fixtures/get_all_barriers.json b/tests/components/nice_go/fixtures/get_all_barriers.json index 84799e0dd32..5a7607612c1 100644 --- a/tests/components/nice_go/fixtures/get_all_barriers.json +++ b/tests/components/nice_go/fixtures/get_all_barriers.json @@ -11,7 +11,6 @@ ], "state": { "deviceId": "1", - "desired": { "key": "value" }, "reported": { "displayName": "Test Garage 1", "autoDisabled": false, @@ -42,7 +41,6 @@ ], "state": { "deviceId": "2", - "desired": { "key": "value" }, "reported": { "displayName": "Test Garage 2", "autoDisabled": false, @@ -73,7 +71,6 @@ ], "state": { "deviceId": "3", - "desired": { "key": "value" }, "reported": { "displayName": "Test Garage 3", "autoDisabled": false, @@ -101,7 +98,6 @@ ], "state": { "deviceId": "4", - "desired": { "key": "value" }, "reported": { "displayName": "Test Garage 4", "autoDisabled": false, diff --git a/tests/components/nice_go/test_init.py b/tests/components/nice_go/test_init.py index 4eb3851516e..051c6623b23 100644 --- a/tests/components/nice_go/test_init.py +++ b/tests/components/nice_go/test_init.py @@ -81,7 +81,6 @@ async def test_firmware_update_required( "displayName": "test-display-name", "migrationStatus": "NOT_STARTED", }, - desired=None, connectionState=None, version=None, timestamp=None, From d2dfba3116d3bd537c0f04a367d072f7d9ec76f7 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sat, 14 Dec 2024 12:00:28 +0100 Subject: [PATCH 234/677] Improve Slide Local device tests (#133197) --- .../components/slide_local/entity.py | 10 +++--- tests/components/slide_local/conftest.py | 20 +++++------ .../slide_local/fixtures/slide_1.json | 4 +-- .../slide_local/snapshots/test_init.ambr | 33 +++++++++++++++++++ 
.../slide_local/test_config_flow.py | 8 ++--- tests/components/slide_local/test_init.py | 29 ++++++++++++++++ 6 files changed, 81 insertions(+), 23 deletions(-) create mode 100644 tests/components/slide_local/snapshots/test_init.ambr create mode 100644 tests/components/slide_local/test_init.py diff --git a/homeassistant/components/slide_local/entity.py b/homeassistant/components/slide_local/entity.py index c1dbc101e6f..51269649add 100644 --- a/homeassistant/components/slide_local/entity.py +++ b/homeassistant/components/slide_local/entity.py @@ -1,6 +1,6 @@ """Entities for slide_local integration.""" -from homeassistant.const import CONF_MAC +from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -12,18 +12,16 @@ class SlideEntity(CoordinatorEntity[SlideCoordinator]): _attr_has_entity_name = True - def __init__( - self, - coordinator: SlideCoordinator, - ) -> None: + def __init__(self, coordinator: SlideCoordinator) -> None: """Initialize the Slide device.""" super().__init__(coordinator) self._attr_device_info = DeviceInfo( manufacturer="Innovation in Motion", - connections={(CONF_MAC, coordinator.data["mac"])}, + connections={(dr.CONNECTION_NETWORK_MAC, coordinator.data["mac"])}, name=coordinator.data["device_name"], sw_version=coordinator.api_version, + hw_version=coordinator.data["board_rev"], serial_number=coordinator.data["mac"], configuration_url=f"http://{coordinator.host}", ) diff --git a/tests/components/slide_local/conftest.py b/tests/components/slide_local/conftest.py index 0d70d1989e7..ad2734bbb64 100644 --- a/tests/components/slide_local/conftest.py +++ b/tests/components/slide_local/conftest.py @@ -6,7 +6,7 @@ from unittest.mock import AsyncMock, patch import pytest from homeassistant.components.slide_local.const import CONF_INVERT_POSITION, DOMAIN -from homeassistant.const import CONF_API_VERSION, CONF_HOST +from 
homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_MAC from .const import HOST, SLIDE_INFO_DATA @@ -22,6 +22,7 @@ def mock_config_entry() -> MockConfigEntry: data={ CONF_HOST: HOST, CONF_API_VERSION: 2, + CONF_MAC: "12:34:56:78:90:ab", }, options={ CONF_INVERT_POSITION: False, @@ -33,25 +34,22 @@ def mock_config_entry() -> MockConfigEntry: @pytest.fixture -def mock_slide_api(): +def mock_slide_api() -> Generator[AsyncMock]: """Build a fixture for the SlideLocalApi that connects successfully and returns one device.""" - mock_slide_local_api = AsyncMock() - mock_slide_local_api.slide_info.return_value = SLIDE_INFO_DATA - with ( patch( - "homeassistant.components.slide_local.SlideLocalApi", + "homeassistant.components.slide_local.coordinator.SlideLocalApi", autospec=True, - return_value=mock_slide_local_api, - ), + ) as mock_slide_local_api, patch( "homeassistant.components.slide_local.config_flow.SlideLocalApi", - autospec=True, - return_value=mock_slide_local_api, + new=mock_slide_local_api, ), ): - yield mock_slide_local_api + client = mock_slide_local_api.return_value + client.slide_info.return_value = SLIDE_INFO_DATA + yield client @pytest.fixture diff --git a/tests/components/slide_local/fixtures/slide_1.json b/tests/components/slide_local/fixtures/slide_1.json index e8c3c85a324..6367b94f243 100644 --- a/tests/components/slide_local/fixtures/slide_1.json +++ b/tests/components/slide_local/fixtures/slide_1.json @@ -1,6 +1,6 @@ { - "slide_id": "slide_300000000000", - "mac": "300000000000", + "slide_id": "slide_1234567890ab", + "mac": "1234567890ab", "board_rev": 1, "device_name": "slide bedroom", "zone_name": "bedroom", diff --git a/tests/components/slide_local/snapshots/test_init.ambr b/tests/components/slide_local/snapshots/test_init.ambr new file mode 100644 index 00000000000..d90f72e4b05 --- /dev/null +++ b/tests/components/slide_local/snapshots/test_init.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_device_info + 
DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://127.0.0.2', + 'connections': set({ + tuple( + 'mac', + '12:34:56:78:90:ab', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': 1, + 'id': , + 'identifiers': set({ + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Innovation in Motion', + 'model': None, + 'model_id': None, + 'name': 'slide bedroom', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '1234567890ab', + 'suggested_area': None, + 'sw_version': 2, + 'via_device_id': None, + }) +# --- diff --git a/tests/components/slide_local/test_config_flow.py b/tests/components/slide_local/test_config_flow.py index 35aa99a90d7..025f8c323ff 100644 --- a/tests/components/slide_local/test_config_flow.py +++ b/tests/components/slide_local/test_config_flow.py @@ -63,7 +63,7 @@ async def test_user( assert result2["data"][CONF_HOST] == HOST assert result2["data"][CONF_PASSWORD] == "pwd" assert result2["data"][CONF_API_VERSION] == 2 - assert result2["result"].unique_id == "30:00:00:00:00:00" + assert result2["result"].unique_id == "12:34:56:78:90:ab" assert not result2["options"][CONF_INVERT_POSITION] assert len(mock_setup_entry.mock_calls) == 1 @@ -96,7 +96,7 @@ async def test_user_api_1( assert result2["data"][CONF_HOST] == HOST assert result2["data"][CONF_PASSWORD] == "pwd" assert result2["data"][CONF_API_VERSION] == 1 - assert result2["result"].unique_id == "30:00:00:00:00:00" + assert result2["result"].unique_id == "12:34:56:78:90:ab" assert not result2["options"][CONF_INVERT_POSITION] assert len(mock_setup_entry.mock_calls) == 1 @@ -143,7 +143,7 @@ async def test_user_api_error( assert result2["data"][CONF_HOST] == HOST assert result2["data"][CONF_PASSWORD] == "pwd" assert result2["data"][CONF_API_VERSION] == 1 - assert result2["result"].unique_id == "30:00:00:00:00:00" + assert result2["result"].unique_id == "12:34:56:78:90:ab" assert not 
result2["options"][CONF_INVERT_POSITION] assert len(mock_setup_entry.mock_calls) == 1 @@ -259,7 +259,7 @@ async def test_abort_if_already_setup( ) -> None: """Test we abort if the device is already setup.""" - MockConfigEntry(domain=DOMAIN, unique_id="30:00:00:00:00:00").add_to_hass(hass) + MockConfigEntry(domain=DOMAIN, unique_id="12:34:56:78:90:ab").add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, diff --git a/tests/components/slide_local/test_init.py b/tests/components/slide_local/test_init.py new file mode 100644 index 00000000000..7b0a2d83164 --- /dev/null +++ b/tests/components/slide_local/test_init.py @@ -0,0 +1,29 @@ +"""Tests for the Slide Local integration.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . import setup_platform + +from tests.common import MockConfigEntry + + +async def test_device_info( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test device registry integration.""" + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + device_entry = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, "1234567890ab")} + ) + assert device_entry is not None + assert device_entry == snapshot From ca1bcbf5d57f636bcec8a0c0fb86513c31320f39 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Sat, 14 Dec 2024 12:07:38 +0100 Subject: [PATCH 235/677] Bump openwebifpy to 4.3.0 (#133188) --- homeassistant/components/enigma2/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/enigma2/manifest.json 
b/homeassistant/components/enigma2/manifest.json index 1a0875b04c0..7d6887ad14c 100644 --- a/homeassistant/components/enigma2/manifest.json +++ b/homeassistant/components/enigma2/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["openwebif"], - "requirements": ["openwebifpy==4.2.7"] + "requirements": ["openwebifpy==4.3.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 3994f0f3029..0f24315caf1 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1556,7 +1556,7 @@ openhomedevice==2.2.0 opensensemap-api==0.2.0 # homeassistant.components.enigma2 -openwebifpy==4.2.7 +openwebifpy==4.3.0 # homeassistant.components.luci openwrt-luci-rpc==1.1.17 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f3309cf24ea..d6e9685d8d7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1298,7 +1298,7 @@ openerz-api==0.3.0 openhomedevice==2.2.0 # homeassistant.components.enigma2 -openwebifpy==4.2.7 +openwebifpy==4.3.0 # homeassistant.components.opower opower==0.8.6 From 06391d4635aaf4dc3b528c78d892738be5b94859 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Sat, 14 Dec 2024 12:10:28 +0100 Subject: [PATCH 236/677] Add reconfiguration to slide_local (#133182) Co-authored-by: Joostlek --- .../components/slide_local/__init__.py | 7 ++++ .../components/slide_local/config_flow.py | 35 ++++++++++++++++++- homeassistant/components/slide_local/cover.py | 6 ++-- .../components/slide_local/quality_scale.yaml | 2 +- .../components/slide_local/strings.json | 14 ++++++++ .../slide_local/test_config_flow.py | 27 +++++++++++++- 6 files changed, 85 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/slide_local/__init__.py b/homeassistant/components/slide_local/__init__.py index 878830fe513..dbe4d516d75 100644 --- a/homeassistant/components/slide_local/__init__.py +++ b/homeassistant/components/slide_local/__init__.py 
@@ -25,9 +25,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: SlideConfigEntry) -> boo await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_on_unload(entry.add_update_listener(update_listener)) + return True +async def update_listener(hass: HomeAssistant, entry: SlideConfigEntry) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) + + async def async_unload_entry(hass: HomeAssistant, entry: SlideConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/slide_local/config_flow.py b/homeassistant/components/slide_local/config_flow.py index bc5033e972b..3ccc89be375 100644 --- a/homeassistant/components/slide_local/config_flow.py +++ b/homeassistant/components/slide_local/config_flow.py @@ -15,10 +15,12 @@ from goslideapi.goslideapi import ( import voluptuous as vol from homeassistant.components.zeroconf import ZeroconfServiceInfo -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_MAC, CONF_PASSWORD +from homeassistant.core import callback from homeassistant.helpers.device_registry import format_mac +from . 
import SlideConfigEntry from .const import CONF_INVERT_POSITION, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -34,6 +36,14 @@ class SlideConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 MINOR_VERSION = 1 + @staticmethod + @callback + def async_get_options_flow( + config_entry: SlideConfigEntry, + ) -> SlideOptionsFlowHandler: + """Get the options flow for this handler.""" + return SlideOptionsFlowHandler() + async def async_test_connection( self, user_input: dict[str, str | int] ) -> dict[str, str]: @@ -181,3 +191,26 @@ class SlideConfigFlow(ConfigFlow, domain=DOMAIN): "host": self._host, }, ) + + +class SlideOptionsFlowHandler(OptionsFlow): + """Handle a options flow for slide_local.""" + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage the options.""" + if user_input is not None: + return self.async_create_entry(data=user_input) + + return self.async_show_form( + step_id="init", + data_schema=self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Required(CONF_INVERT_POSITION): bool, + } + ), + {CONF_INVERT_POSITION: self.config_entry.options[CONF_INVERT_POSITION]}, + ), + ) diff --git a/homeassistant/components/slide_local/cover.py b/homeassistant/components/slide_local/cover.py index 1bf026746c6..cf04f46d139 100644 --- a/homeassistant/components/slide_local/cover.py +++ b/homeassistant/components/slide_local/cover.py @@ -54,7 +54,7 @@ class SlideCoverLocal(SlideEntity, CoverEntity): super().__init__(coordinator) self._attr_name = None - self._invert = entry.options[CONF_INVERT_POSITION] + self.invert = entry.options[CONF_INVERT_POSITION] self._attr_unique_id = coordinator.data["mac"] @property @@ -79,7 +79,7 @@ class SlideCoverLocal(SlideEntity, CoverEntity): if pos is not None: if (1 - pos) <= DEFAULT_OFFSET or pos <= DEFAULT_OFFSET: pos = round(pos) - if not self._invert: + if not self.invert: pos = 1 - pos pos = int(pos * 100) return pos @@ -101,7 +101,7 @@ class 
SlideCoverLocal(SlideEntity, CoverEntity): async def async_set_cover_position(self, **kwargs: Any) -> None: """Move the cover to a specific position.""" position = kwargs[ATTR_POSITION] / 100 - if not self._invert: + if not self.invert: position = 1 - position if self.coordinator.data["pos"] is not None: diff --git a/homeassistant/components/slide_local/quality_scale.yaml b/homeassistant/components/slide_local/quality_scale.yaml index 048a428f236..4eda62f6497 100644 --- a/homeassistant/components/slide_local/quality_scale.yaml +++ b/homeassistant/components/slide_local/quality_scale.yaml @@ -33,7 +33,7 @@ rules: test-coverage: todo integration-owner: done docs-installation-parameters: done - docs-configuration-parameters: todo + docs-configuration-parameters: done # Gold entity-translations: todo diff --git a/homeassistant/components/slide_local/strings.json b/homeassistant/components/slide_local/strings.json index 38090c7e62d..3e693fe51b9 100644 --- a/homeassistant/components/slide_local/strings.json +++ b/homeassistant/components/slide_local/strings.json @@ -27,6 +27,20 @@ "unknown": "[%key:common::config_flow::error::unknown%]" } }, + "options": { + "step": { + "init": { + "title": "Configure Slide", + "description": "Reconfigure the Slide device", + "data": { + "invert_position": "Invert position" + }, + "data_description": { + "invert_position": "Invert the position of your slide cover." + } + } + } + }, "exceptions": { "update_error": { "message": "Error while updating data from the API." 
diff --git a/tests/components/slide_local/test_config_flow.py b/tests/components/slide_local/test_config_flow.py index 025f8c323ff..48be7dd7850 100644 --- a/tests/components/slide_local/test_config_flow.py +++ b/tests/components/slide_local/test_config_flow.py @@ -14,10 +14,11 @@ import pytest from homeassistant.components.slide_local.const import CONF_INVERT_POSITION, DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF -from homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_PASSWORD +from homeassistant.const import CONF_API_VERSION, CONF_HOST, CONF_PASSWORD, Platform from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from . import setup_platform from .const import HOST, SLIDE_INFO_DATA from tests.common import MockConfigEntry @@ -371,3 +372,27 @@ async def test_zeroconf_connection_error( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "discovery_connection_failed" + + +async def test_options_flow( + hass: HomeAssistant, mock_slide_api: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test options flow works correctly.""" + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_INVERT_POSITION: True, + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert mock_config_entry.options == { + CONF_INVERT_POSITION: True, + } From d85d98607589e76ef89c3917c4f6384df6591700 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Sat, 14 Dec 2024 12:19:42 +0100 Subject: [PATCH 237/677] Add button entity to slide_local (#133141) Co-authored-by: 
Joostlek --- .../components/slide_local/__init__.py | 6 +-- .../components/slide_local/button.py | 42 +++++++++++++++++ .../components/slide_local/icons.json | 9 ++++ .../components/slide_local/strings.json | 7 +++ .../slide_local/snapshots/test_button.ambr | 47 +++++++++++++++++++ tests/components/slide_local/test_button.py | 46 ++++++++++++++++++ 6 files changed, 153 insertions(+), 4 deletions(-) create mode 100644 homeassistant/components/slide_local/button.py create mode 100644 homeassistant/components/slide_local/icons.json create mode 100644 tests/components/slide_local/snapshots/test_button.ambr create mode 100644 tests/components/slide_local/test_button.py diff --git a/homeassistant/components/slide_local/__init__.py b/homeassistant/components/slide_local/__init__.py index dbe4d516d75..6f329477600 100644 --- a/homeassistant/components/slide_local/__init__.py +++ b/homeassistant/components/slide_local/__init__.py @@ -2,16 +2,14 @@ from __future__ import annotations -from goslideapi.goslideapi import GoSlideLocal as SlideLocalApi - from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from .coordinator import SlideCoordinator -PLATFORMS = [Platform.COVER] -type SlideConfigEntry = ConfigEntry[SlideLocalApi] +PLATFORMS = [Platform.BUTTON, Platform.COVER] +type SlideConfigEntry = ConfigEntry[SlideCoordinator] async def async_setup_entry(hass: HomeAssistant, entry: SlideConfigEntry) -> bool: diff --git a/homeassistant/components/slide_local/button.py b/homeassistant/components/slide_local/button.py new file mode 100644 index 00000000000..9c285881116 --- /dev/null +++ b/homeassistant/components/slide_local/button.py @@ -0,0 +1,42 @@ +"""Support for Slide button.""" + +from __future__ import annotations + +from homeassistant.components.button import ButtonEntity +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from 
homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import SlideConfigEntry +from .coordinator import SlideCoordinator +from .entity import SlideEntity + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SlideConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up button for Slide platform.""" + + coordinator = entry.runtime_data + + async_add_entities([SlideButton(coordinator)]) + + +class SlideButton(SlideEntity, ButtonEntity): + """Defines a Slide button.""" + + _attr_entity_category = EntityCategory.CONFIG + _attr_translation_key = "calibrate" + + def __init__(self, coordinator: SlideCoordinator) -> None: + """Initialize the slide button.""" + super().__init__(coordinator) + self._attr_unique_id = f"{coordinator.data["mac"]}-calibrate" + + async def async_press(self) -> None: + """Send out a calibrate command.""" + await self.coordinator.slide.slide_calibrate(self.coordinator.host) diff --git a/homeassistant/components/slide_local/icons.json b/homeassistant/components/slide_local/icons.json new file mode 100644 index 00000000000..70d53e7f7a3 --- /dev/null +++ b/homeassistant/components/slide_local/icons.json @@ -0,0 +1,9 @@ +{ + "entity": { + "button": { + "calibrate": { + "default": "mdi:tape-measure" + } + } + } +} diff --git a/homeassistant/components/slide_local/strings.json b/homeassistant/components/slide_local/strings.json index 3e693fe51b9..c593dea8ed7 100644 --- a/homeassistant/components/slide_local/strings.json +++ b/homeassistant/components/slide_local/strings.json @@ -41,6 +41,13 @@ } } }, + "entity": { + "button": { + "calibrate": { + "name": "Calibrate" + } + } + }, "exceptions": { "update_error": { "message": "Error while updating data from the API." 
diff --git a/tests/components/slide_local/snapshots/test_button.ambr b/tests/components/slide_local/snapshots/test_button.ambr new file mode 100644 index 00000000000..549538f1361 --- /dev/null +++ b/tests/components/slide_local/snapshots/test_button.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_all_entities[button.slide_bedroom_calibrate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.slide_bedroom_calibrate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Calibrate', + 'platform': 'slide_local', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'calibrate', + 'unique_id': '1234567890ab-calibrate', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[button.slide_bedroom_calibrate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'slide bedroom Calibrate', + }), + 'context': , + 'entity_id': 'button.slide_bedroom_calibrate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/slide_local/test_button.py b/tests/components/slide_local/test_button.py new file mode 100644 index 00000000000..646c8fd7ef3 --- /dev/null +++ b/tests/components/slide_local/test_button.py @@ -0,0 +1,46 @@ +"""Tests for the Slide Local button platform.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import 
entity_registry as er + +from . import setup_platform + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_platform(hass, mock_config_entry, [Platform.BUTTON]) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_pressing_button( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test pressing button.""" + await setup_platform(hass, mock_config_entry, [Platform.BUTTON]) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: "button.slide_bedroom_calibrate", + }, + blocking=True, + ) + mock_slide_api.slide_calibrate.assert_called_once() From 980b8a91e62c449fab558318573fa756818875a6 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Sat, 14 Dec 2024 14:21:19 +0100 Subject: [PATCH 238/677] Revert "Simplify recorder RecorderRunsManager" (#133201) Revert "Simplify recorder RecorderRunsManager (#131785)" This reverts commit cf0ee635077114961f6e508be56ce7620c718c18. 
--- .../recorder/table_managers/recorder_runs.py | 73 ++++++++++++++++--- .../table_managers/test_recorder_runs.py | 32 ++++++-- 2 files changed, 90 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/recorder/table_managers/recorder_runs.py b/homeassistant/components/recorder/table_managers/recorder_runs.py index 4ca0aa18b88..b0b9818118b 100644 --- a/homeassistant/components/recorder/table_managers/recorder_runs.py +++ b/homeassistant/components/recorder/table_managers/recorder_runs.py @@ -2,6 +2,8 @@ from __future__ import annotations +import bisect +from dataclasses import dataclass from datetime import datetime from sqlalchemy.orm.session import Session @@ -9,6 +11,34 @@ from sqlalchemy.orm.session import Session import homeassistant.util.dt as dt_util from ..db_schema import RecorderRuns +from ..models import process_timestamp + + +def _find_recorder_run_for_start_time( + run_history: _RecorderRunsHistory, start: datetime +) -> RecorderRuns | None: + """Find the recorder run for a start time in _RecorderRunsHistory.""" + run_timestamps = run_history.run_timestamps + runs_by_timestamp = run_history.runs_by_timestamp + + # bisect_left tells us were we would insert + # a value in the list of runs after the start timestamp. 
+ # + # The run before that (idx-1) is when the run started + # + # If idx is 0, history never ran before the start timestamp + # + if idx := bisect.bisect_left(run_timestamps, start.timestamp()): + return runs_by_timestamp[run_timestamps[idx - 1]] + return None + + +@dataclass(frozen=True) +class _RecorderRunsHistory: + """Bisectable history of RecorderRuns.""" + + run_timestamps: list[int] + runs_by_timestamp: dict[int, RecorderRuns] class RecorderRunsManager: @@ -18,7 +48,7 @@ class RecorderRunsManager: """Track recorder run history.""" self._recording_start = dt_util.utcnow() self._current_run_info: RecorderRuns | None = None - self._first_run: RecorderRuns | None = None + self._run_history = _RecorderRunsHistory([], {}) @property def recording_start(self) -> datetime: @@ -28,7 +58,9 @@ class RecorderRunsManager: @property def first(self) -> RecorderRuns: """Get the first run.""" - return self._first_run or self.current + if runs_by_timestamp := self._run_history.runs_by_timestamp: + return next(iter(runs_by_timestamp.values())) + return self.current @property def current(self) -> RecorderRuns: @@ -46,6 +78,15 @@ class RecorderRunsManager: """Return if a run is active.""" return self._current_run_info is not None + def get(self, start: datetime) -> RecorderRuns | None: + """Return the recorder run that started before or at start. + + If the first run started after the start, return None + """ + if start >= self.recording_start: + return self.current + return _find_recorder_run_for_start_time(self._run_history, start) + def start(self, session: Session) -> None: """Start a new run. @@ -81,17 +122,31 @@ class RecorderRunsManager: Must run in the recorder thread. 
""" - if ( - run := session.query(RecorderRuns) - .order_by(RecorderRuns.start.asc()) - .first() - ): + run_timestamps: list[int] = [] + runs_by_timestamp: dict[int, RecorderRuns] = {} + + for run in session.query(RecorderRuns).order_by(RecorderRuns.start.asc()).all(): session.expunge(run) - self._first_run = run + if run_dt := process_timestamp(run.start): + # Not sure if this is correct or runs_by_timestamp annotation should be changed + timestamp = int(run_dt.timestamp()) + run_timestamps.append(timestamp) + runs_by_timestamp[timestamp] = run + + # + # self._run_history is accessed in get() + # which is allowed to be called from any thread + # + # We use a dataclass to ensure that when we update + # run_timestamps and runs_by_timestamp + # are never out of sync with each other. + # + self._run_history = _RecorderRunsHistory(run_timestamps, runs_by_timestamp) def clear(self) -> None: """Clear the current run after ending it. Must run in the recorder thread. """ - self._current_run_info = None + if self._current_run_info: + self._current_run_info = None diff --git a/tests/components/recorder/table_managers/test_recorder_runs.py b/tests/components/recorder/table_managers/test_recorder_runs.py index e79def01bad..41f3a8fef4d 100644 --- a/tests/components/recorder/table_managers/test_recorder_runs.py +++ b/tests/components/recorder/table_managers/test_recorder_runs.py @@ -21,11 +21,6 @@ async def test_run_history(recorder_mock: Recorder, hass: HomeAssistant) -> None two_days_ago = now - timedelta(days=2) one_day_ago = now - timedelta(days=1) - # Test that the first run falls back to the current run - assert process_timestamp( - instance.recorder_runs_manager.first.start - ) == process_timestamp(instance.recorder_runs_manager.current.start) - with instance.get_session() as session: session.add(RecorderRuns(start=three_days_ago, created=three_days_ago)) session.add(RecorderRuns(start=two_days_ago, created=two_days_ago)) @@ -34,7 +29,32 @@ async def 
test_run_history(recorder_mock: Recorder, hass: HomeAssistant) -> None instance.recorder_runs_manager.load_from_db(session) assert ( - process_timestamp(instance.recorder_runs_manager.first.start) == three_days_ago + process_timestamp( + instance.recorder_runs_manager.get( + three_days_ago + timedelta(microseconds=1) + ).start + ) + == three_days_ago + ) + assert ( + process_timestamp( + instance.recorder_runs_manager.get( + two_days_ago + timedelta(microseconds=1) + ).start + ) + == two_days_ago + ) + assert ( + process_timestamp( + instance.recorder_runs_manager.get( + one_day_ago + timedelta(microseconds=1) + ).start + ) + == one_day_ago + ) + assert ( + process_timestamp(instance.recorder_runs_manager.get(now).start) + == instance.recorder_runs_manager.recording_start ) From 9e2a3ea0e5c95c451ffc03f765b17041f69fcfa7 Mon Sep 17 00:00:00 2001 From: Dan Raper Date: Sat, 14 Dec 2024 17:12:44 +0000 Subject: [PATCH 239/677] Add Ohme integration (#132574) --- CODEOWNERS | 2 + homeassistant/components/ohme/__init__.py | 65 +++++ homeassistant/components/ohme/config_flow.py | 64 +++++ homeassistant/components/ohme/const.py | 6 + homeassistant/components/ohme/coordinator.py | 68 +++++ homeassistant/components/ohme/entity.py | 42 +++ homeassistant/components/ohme/icons.json | 18 ++ homeassistant/components/ohme/manifest.json | 11 + .../components/ohme/quality_scale.yaml | 83 ++++++ homeassistant/components/ohme/sensor.py | 107 +++++++ homeassistant/components/ohme/strings.json | 51 ++++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/ohme/__init__.py | 14 + tests/components/ohme/conftest.py | 64 +++++ .../components/ohme/snapshots/test_init.ambr | 33 +++ .../ohme/snapshots/test_sensor.ambr | 268 ++++++++++++++++++ tests/components/ohme/test_config_flow.py | 110 +++++++ tests/components/ohme/test_init.py | 47 +++ tests/components/ohme/test_sensor.py | 59 
++++ 22 files changed, 1125 insertions(+) create mode 100644 homeassistant/components/ohme/__init__.py create mode 100644 homeassistant/components/ohme/config_flow.py create mode 100644 homeassistant/components/ohme/const.py create mode 100644 homeassistant/components/ohme/coordinator.py create mode 100644 homeassistant/components/ohme/entity.py create mode 100644 homeassistant/components/ohme/icons.json create mode 100644 homeassistant/components/ohme/manifest.json create mode 100644 homeassistant/components/ohme/quality_scale.yaml create mode 100644 homeassistant/components/ohme/sensor.py create mode 100644 homeassistant/components/ohme/strings.json create mode 100644 tests/components/ohme/__init__.py create mode 100644 tests/components/ohme/conftest.py create mode 100644 tests/components/ohme/snapshots/test_init.ambr create mode 100644 tests/components/ohme/snapshots/test_sensor.ambr create mode 100644 tests/components/ohme/test_config_flow.py create mode 100644 tests/components/ohme/test_init.py create mode 100644 tests/components/ohme/test_sensor.py diff --git a/CODEOWNERS b/CODEOWNERS index 06eb70c7576..f1c6aa4aea5 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1053,6 +1053,8 @@ build.json @home-assistant/supervisor /homeassistant/components/octoprint/ @rfleming71 /tests/components/octoprint/ @rfleming71 /homeassistant/components/ohmconnect/ @robbiet480 +/homeassistant/components/ohme/ @dan-r +/tests/components/ohme/ @dan-r /homeassistant/components/ollama/ @synesthesiam /tests/components/ollama/ @synesthesiam /homeassistant/components/ombi/ @larssont diff --git a/homeassistant/components/ohme/__init__.py b/homeassistant/components/ohme/__init__.py new file mode 100644 index 00000000000..8ca983cd72a --- /dev/null +++ b/homeassistant/components/ohme/__init__.py @@ -0,0 +1,65 @@ +"""Set up ohme integration.""" + +from dataclasses import dataclass + +from ohme import ApiException, AuthException, OhmeApiClient + +from homeassistant.config_entries import ConfigEntry 
+from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady + +from .const import DOMAIN, PLATFORMS +from .coordinator import OhmeAdvancedSettingsCoordinator, OhmeChargeSessionCoordinator + +type OhmeConfigEntry = ConfigEntry[OhmeRuntimeData] + + +@dataclass() +class OhmeRuntimeData: + """Dataclass to hold ohme coordinators.""" + + charge_session_coordinator: OhmeChargeSessionCoordinator + advanced_settings_coordinator: OhmeAdvancedSettingsCoordinator + + +async def async_setup_entry(hass: HomeAssistant, entry: OhmeConfigEntry) -> bool: + """Set up Ohme from a config entry.""" + + client = OhmeApiClient(entry.data[CONF_EMAIL], entry.data[CONF_PASSWORD]) + + try: + await client.async_login() + + if not await client.async_update_device_info(): + raise ConfigEntryNotReady( + translation_key="device_info_failed", translation_domain=DOMAIN + ) + except AuthException as e: + raise ConfigEntryError( + translation_key="auth_failed", translation_domain=DOMAIN + ) from e + except ApiException as e: + raise ConfigEntryNotReady( + translation_key="api_failed", translation_domain=DOMAIN + ) from e + + coordinators = ( + OhmeChargeSessionCoordinator(hass, client), + OhmeAdvancedSettingsCoordinator(hass, client), + ) + + for coordinator in coordinators: + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = OhmeRuntimeData(*coordinators) + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: OhmeConfigEntry) -> bool: + """Unload a config entry.""" + + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/ohme/config_flow.py b/homeassistant/components/ohme/config_flow.py new file mode 100644 index 00000000000..ea110f6df23 --- /dev/null +++ 
b/homeassistant/components/ohme/config_flow.py @@ -0,0 +1,64 @@ +"""Config flow for ohme integration.""" + +from typing import Any + +from ohme import ApiException, AuthException, OhmeApiClient +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.helpers.selector import ( + TextSelector, + TextSelectorConfig, + TextSelectorType, +) + +from .const import DOMAIN + +USER_SCHEMA = vol.Schema( + { + vol.Required(CONF_EMAIL): TextSelector( + TextSelectorConfig( + type=TextSelectorType.EMAIL, + autocomplete="email", + ), + ), + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig( + type=TextSelectorType.PASSWORD, + autocomplete="current-password", + ), + ), + } +) + + +class OhmeConfigFlow(ConfigFlow, domain=DOMAIN): + """Config flow.""" + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """First config step.""" + + errors: dict[str, str] = {} + + if user_input is not None: + self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]}) + + instance = OhmeApiClient(user_input[CONF_EMAIL], user_input[CONF_PASSWORD]) + try: + await instance.async_login() + except AuthException: + errors["base"] = "invalid_auth" + except ApiException: + errors["base"] = "unknown" + + if not errors: + return self.async_create_entry( + title=user_input[CONF_EMAIL], data=user_input + ) + + return self.async_show_form( + step_id="user", data_schema=USER_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/ohme/const.py b/homeassistant/components/ohme/const.py new file mode 100644 index 00000000000..adc5ddfd61b --- /dev/null +++ b/homeassistant/components/ohme/const.py @@ -0,0 +1,6 @@ +"""Component constants.""" + +from homeassistant.const import Platform + +DOMAIN = "ohme" +PLATFORMS = [Platform.SENSOR] diff --git a/homeassistant/components/ohme/coordinator.py 
b/homeassistant/components/ohme/coordinator.py new file mode 100644 index 00000000000..5de59b3d4b2 --- /dev/null +++ b/homeassistant/components/ohme/coordinator.py @@ -0,0 +1,68 @@ +"""Ohme coordinators.""" + +from abc import abstractmethod +from datetime import timedelta +import logging + +from ohme import ApiException, OhmeApiClient + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class OhmeBaseCoordinator(DataUpdateCoordinator[None]): + """Base for all Ohme coordinators.""" + + client: OhmeApiClient + _default_update_interval: timedelta | None = timedelta(minutes=1) + coordinator_name: str = "" + + def __init__(self, hass: HomeAssistant, client: OhmeApiClient) -> None: + """Initialise coordinator.""" + super().__init__( + hass, + _LOGGER, + name="", + update_interval=self._default_update_interval, + ) + + self.name = f"Ohme {self.coordinator_name}" + self.client = client + + async def _async_update_data(self) -> None: + """Fetch data from API endpoint.""" + try: + await self._internal_update_data() + except ApiException as e: + raise UpdateFailed( + translation_key="api_failed", translation_domain=DOMAIN + ) from e + + @abstractmethod + async def _internal_update_data(self) -> None: + """Update coordinator data.""" + + +class OhmeChargeSessionCoordinator(OhmeBaseCoordinator): + """Coordinator to pull all updates from the API.""" + + coordinator_name = "Charge Sessions" + _default_update_interval = timedelta(seconds=30) + + async def _internal_update_data(self): + """Fetch data from API endpoint.""" + await self.client.async_get_charge_session() + + +class OhmeAdvancedSettingsCoordinator(OhmeBaseCoordinator): + """Coordinator to pull settings and charger state from the API.""" + + coordinator_name = "Advanced Settings" + + async def _internal_update_data(self): + """Fetch data from API endpoint.""" + 
await self.client.async_get_advanced_settings() diff --git a/homeassistant/components/ohme/entity.py b/homeassistant/components/ohme/entity.py new file mode 100644 index 00000000000..2c662f7fccb --- /dev/null +++ b/homeassistant/components/ohme/entity.py @@ -0,0 +1,42 @@ +"""Base class for entities.""" + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import OhmeBaseCoordinator + + +class OhmeEntity(CoordinatorEntity[OhmeBaseCoordinator]): + """Base class for all Ohme entities.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: OhmeBaseCoordinator, + entity_description: EntityDescription, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + + self.entity_description = entity_description + + client = coordinator.client + self._attr_unique_id = f"{client.serial}_{entity_description.key}" + + device_info = client.device_info + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, client.serial)}, + name=device_info["name"], + manufacturer="Ohme", + model=device_info["model"], + sw_version=device_info["sw_version"], + serial_number=client.serial, + ) + + @property + def available(self) -> bool: + """Return if charger reporting as online.""" + return super().available and self.coordinator.client.available diff --git a/homeassistant/components/ohme/icons.json b/homeassistant/components/ohme/icons.json new file mode 100644 index 00000000000..228907b3dbe --- /dev/null +++ b/homeassistant/components/ohme/icons.json @@ -0,0 +1,18 @@ +{ + "entity": { + "sensor": { + "status": { + "default": "mdi:car", + "state": { + "unplugged": "mdi:power-plug-off", + "plugged_in": "mdi:power-plug", + "charging": "mdi:battery-charging-100", + "pending_approval": "mdi:alert-decagram" + } + }, + "ct_current": { + "default": "mdi:gauge" + } + } + 
} +} diff --git a/homeassistant/components/ohme/manifest.json b/homeassistant/components/ohme/manifest.json new file mode 100644 index 00000000000..2d387ce9e8a --- /dev/null +++ b/homeassistant/components/ohme/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "ohme", + "name": "Ohme", + "codeowners": ["@dan-r"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/ohme/", + "integration_type": "device", + "iot_class": "cloud_polling", + "quality_scale": "bronze", + "requirements": ["ohme==1.1.1"] +} diff --git a/homeassistant/components/ohme/quality_scale.yaml b/homeassistant/components/ohme/quality_scale.yaml new file mode 100644 index 00000000000..cffc9eb7b82 --- /dev/null +++ b/homeassistant/components/ohme/quality_scale.yaml @@ -0,0 +1,83 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration has no custom actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration has no custom actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + This integration has no explicit subscriptions to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + This integration has no custom actions and read-only platform only. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + This integration has no options flow. 
+ docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: todo + test-coverage: done + + # Gold + devices: done + diagnostics: todo + discovery: + status: exempt + comment: | + All supported devices are cloud connected over mobile data. Discovery is not possible. + discovery-update-info: + status: exempt + comment: | + All supported devices are cloud connected over mobile data. Discovery is not possible. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: done + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: todo + entity-device-class: done + entity-disabled-by-default: todo + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration currently has no repairs. + stale-devices: todo + # Platinum + async-dependency: todo + inject-websession: todo + strict-typing: todo diff --git a/homeassistant/components/ohme/sensor.py b/homeassistant/components/ohme/sensor.py new file mode 100644 index 00000000000..d4abaf85b1f --- /dev/null +++ b/homeassistant/components/ohme/sensor.py @@ -0,0 +1,107 @@ +"""Platform for sensor.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from ohme import ChargerStatus, OhmeApiClient + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import UnitOfElectricCurrent, UnitOfEnergy, UnitOfPower +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import OhmeConfigEntry +from .entity import OhmeEntity + +PARALLEL_UPDATES = 0 + + +@dataclass(frozen=True, kw_only=True) +class OhmeSensorDescription(SensorEntityDescription): + """Class describing Ohme sensor entities.""" + + value_fn: Callable[[OhmeApiClient], str | int | float] + is_supported_fn: Callable[[OhmeApiClient], bool] = lambda _: True + + +SENSOR_CHARGE_SESSION = [ + OhmeSensorDescription( + key="status", + translation_key="status", + device_class=SensorDeviceClass.ENUM, + options=[e.value for e in ChargerStatus], + value_fn=lambda client: client.status.value, + ), + OhmeSensorDescription( + key="current", + device_class=SensorDeviceClass.CURRENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda client: client.power.amps, + ), + OhmeSensorDescription( + key="power", + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.WATT, + suggested_unit_of_measurement=UnitOfPower.KILO_WATT, + suggested_display_precision=1, + value_fn=lambda client: client.power.watts, + ), + OhmeSensorDescription( + key="energy", + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + suggested_display_precision=1, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda client: client.energy, + ), +] + +SENSOR_ADVANCED_SETTINGS = [ + OhmeSensorDescription( + key="ct_current", + translation_key="ct_current", + device_class=SensorDeviceClass.CURRENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda client: client.power.ct_amps, + is_supported_fn=lambda client: client.ct_connected, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: OhmeConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up sensors.""" + coordinators = config_entry.runtime_data + coordinator_map = [ + (SENSOR_CHARGE_SESSION, coordinators.charge_session_coordinator), + 
(SENSOR_ADVANCED_SETTINGS, coordinators.advanced_settings_coordinator), + ] + + async_add_entities( + OhmeSensor(coordinator, description) + for entities, coordinator in coordinator_map + for description in entities + if description.is_supported_fn(coordinator.client) + ) + + +class OhmeSensor(OhmeEntity, SensorEntity): + """Generic sensor for Ohme.""" + + entity_description: OhmeSensorDescription + + @property + def native_value(self) -> str | int | float: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.client) diff --git a/homeassistant/components/ohme/strings.json b/homeassistant/components/ohme/strings.json new file mode 100644 index 00000000000..06231ed5cf4 --- /dev/null +++ b/homeassistant/components/ohme/strings.json @@ -0,0 +1,51 @@ +{ + "config": { + "step": { + "user": { + "description": "Configure your Ohme account. If you signed up to Ohme with a third party account like Google, please reset your password via Ohme before configuring this integration.", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "email": "Enter the email address associated with your Ohme account.", + "password": "Enter the password for your Ohme account" + } + } + }, + "error": { + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + }, + "entity": { + "sensor": { + "status": { + "name": "Status", + "state": { + "unplugged": "Unplugged", + "plugged_in": "Plugged in", + "charging": "Charging", + "pending_approval": "Pending approval" + } + }, + "ct_current": { + "name": "CT current" + } + } + }, + "exceptions": { + "auth_failed": { + "message": "Unable to login to Ohme" + }, + "device_info_failed": { + "message": "Unable to get Ohme device 
information" + }, + "api_failed": { + "message": "Error communicating with Ohme API" + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 3b33d31a2a2..8e88e8a2ae8 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -423,6 +423,7 @@ FLOWS = { "nzbget", "obihai", "octoprint", + "ohme", "ollama", "omnilogic", "oncue", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 1530e308e7d..a94962b458b 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -4329,6 +4329,12 @@ "config_flow": false, "iot_class": "cloud_polling" }, + "ohme": { + "name": "Ohme", + "integration_type": "device", + "config_flow": true, + "iot_class": "cloud_polling" + }, "ollama": { "name": "Ollama", "integration_type": "service", diff --git a/requirements_all.txt b/requirements_all.txt index 0f24315caf1..54e80820491 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1522,6 +1522,9 @@ odp-amsterdam==6.0.2 # homeassistant.components.oem oemthermostat==1.1.1 +# homeassistant.components.ohme +ohme==1.1.1 + # homeassistant.components.ollama ollama==0.3.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d6e9685d8d7..d4c1efeda15 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1270,6 +1270,9 @@ objgraph==3.5.0 # homeassistant.components.garages_amsterdam odp-amsterdam==6.0.2 +# homeassistant.components.ohme +ohme==1.1.1 + # homeassistant.components.ollama ollama==0.3.3 diff --git a/tests/components/ohme/__init__.py b/tests/components/ohme/__init__.py new file mode 100644 index 00000000000..7c00bedbd1e --- /dev/null +++ b/tests/components/ohme/__init__.py @@ -0,0 +1,14 @@ +"""Tests for the Ohme integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + 
hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Set up the Ohme integration for testing.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/ohme/conftest.py b/tests/components/ohme/conftest.py new file mode 100644 index 00000000000..90395feeb6b --- /dev/null +++ b/tests/components/ohme/conftest.py @@ -0,0 +1,64 @@ +"""Provide common fixtures.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from ohme import ChargerPower, ChargerStatus +import pytest + +from homeassistant.components.ohme.const import DOMAIN +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.ohme.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title="test@example.com", + domain=DOMAIN, + version=1, + data={ + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter2", + }, + ) + + +@pytest.fixture +def mock_client(): + """Fixture to mock the OhmeApiClient.""" + with ( + patch( + "homeassistant.components.ohme.config_flow.OhmeApiClient", + autospec=True, + ) as client, + patch( + "homeassistant.components.ohme.OhmeApiClient", + new=client, + ), + ): + client = client.return_value + client.async_login.return_value = True + client.status = ChargerStatus.CHARGING + client.power = ChargerPower(0, 0, 0, 0) + client.serial = "chargerid" + client.ct_connected = True + client.energy = 1000 + client.device_info = { + "name": "Ohme Home Pro", + "model": "Home Pro", + "sw_version": 
"v2.65", + } + yield client diff --git a/tests/components/ohme/snapshots/test_init.ambr b/tests/components/ohme/snapshots/test_init.ambr new file mode 100644 index 00000000000..e3ed339b78a --- /dev/null +++ b/tests/components/ohme/snapshots/test_init.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_device + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'ohme', + 'chargerid', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Ohme', + 'model': 'Home Pro', + 'model_id': None, + 'name': 'Ohme Home Pro', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': 'chargerid', + 'suggested_area': None, + 'sw_version': 'v2.65', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/ohme/snapshots/test_sensor.ambr b/tests/components/ohme/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..fbffa5b7e5d --- /dev/null +++ b/tests/components/ohme/snapshots/test_sensor.ambr @@ -0,0 +1,268 @@ +# serializer version: 1 +# name: test_sensors[sensor.ohme_home_pro_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohme_home_pro_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'CT current', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ct_current', + 'unique_id': 'chargerid_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensors[sensor.ohme_home_pro_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Ohme Home Pro CT current', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohme_home_pro_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohme_home_pro_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'chargerid_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Ohme Home Pro Current', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohme_home_pro_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohme_home_pro_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 
'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'chargerid_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Ohme Home Pro Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohme_home_pro_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.0', + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohme_home_pro_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'chargerid_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Ohme Home Pro Power', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohme_home_pro_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: 
test_sensors[sensor.ohme_home_pro_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'unplugged', + 'pending_approval', + 'charging', + 'plugged_in', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohme_home_pro_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status', + 'unique_id': 'chargerid_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.ohme_home_pro_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Ohme Home Pro Status', + 'options': list([ + 'unplugged', + 'pending_approval', + 'charging', + 'plugged_in', + ]), + }), + 'context': , + 'entity_id': 'sensor.ohme_home_pro_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'charging', + }) +# --- diff --git a/tests/components/ohme/test_config_flow.py b/tests/components/ohme/test_config_flow.py new file mode 100644 index 00000000000..b9d4a10a76e --- /dev/null +++ b/tests/components/ohme/test_config_flow.py @@ -0,0 +1,110 @@ +"""Tests for the config flow.""" + +from unittest.mock import AsyncMock, MagicMock + +from ohme import ApiException, AuthException +import pytest + +from homeassistant.components.ohme.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def 
test_config_flow_success( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_client: MagicMock +) -> None: + """Test config flow.""" + + # Initial form load + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + # Successful login + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "test@example.com", CONF_PASSWORD: "hunter2"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test@example.com" + assert result["data"] == { + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter2", + } + + +@pytest.mark.parametrize( + ("test_exception", "expected_error"), + [(AuthException, "invalid_auth"), (ApiException, "unknown")], +) +async def test_config_flow_fail( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_client: MagicMock, + test_exception: Exception, + expected_error: str, +) -> None: + """Test config flow errors.""" + + # Initial form load + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + # Failed login + mock_client.async_login.side_effect = test_exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "test@example.com", CONF_PASSWORD: "hunter1"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": expected_error} + + # End with CREATE_ENTRY + mock_client.async_login.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_EMAIL: "test@example.com", CONF_PASSWORD: "hunter1"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test@example.com" + assert result["data"] == { + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter1", + 
} + + +async def test_already_configured( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Ensure we can't add the same account twice.""" + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter3", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/ohme/test_init.py b/tests/components/ohme/test_init.py new file mode 100644 index 00000000000..0f4c7cd64ee --- /dev/null +++ b/tests/components/ohme/test_init.py @@ -0,0 +1,47 @@ +"""Test init of Ohme integration.""" + +from unittest.mock import MagicMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.ohme.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_load_unload_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test loading and unloading the integration.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_device( + mock_client: MagicMock, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Snapshot the device from registry.""" + await setup_integration(hass, mock_config_entry) + + device = device_registry.async_get_device({(DOMAIN, mock_client.serial)}) + assert device + assert device == snapshot diff --git a/tests/components/ohme/test_sensor.py b/tests/components/ohme/test_sensor.py new file mode 100644 index 00000000000..21f9f06f963 --- /dev/null +++ b/tests/components/ohme/test_sensor.py @@ -0,0 +1,59 @@ +"""Tests for sensors.""" + +from datetime import timedelta +from unittest.mock import MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +from ohme import ApiException +from syrupy import SnapshotAssertion + +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test the Ohme sensors.""" + with patch("homeassistant.components.ohme.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_sensors_unavailable( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test that sensors show as unavailable after a coordinator failure.""" + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("sensor.ohme_home_pro_energy") + assert state.state == "1.0" + + mock_client.async_get_charge_session.side_effect = ApiException + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("sensor.ohme_home_pro_energy") + assert state.state == STATE_UNAVAILABLE + + mock_client.async_get_charge_session.side_effect = None + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("sensor.ohme_home_pro_energy") + assert state.state == "1.0" From ff1df757b157c912eeee993fdd0347686b11ffec Mon Sep 17 00:00:00 2001 From: YogevBokobza Date: Sat, 14 Dec 2024 21:06:36 +0200 Subject: [PATCH 240/677] Switcher move _async_call_api to entity.py (#132877) * Switcher move _async_call_api to entity.py * fix based on requested changes * fix based on requested changes --- .../components/switcher_kis/cover.py | 31 ---------------- .../components/switcher_kis/entity.py | 34 ++++++++++++++++++ 
.../components/switcher_kis/light.py | 31 ---------------- .../components/switcher_kis/switch.py | 31 +--------------- tests/components/switcher_kis/conftest.py | 12 ++----- tests/components/switcher_kis/test_button.py | 8 ++--- tests/components/switcher_kis/test_climate.py | 18 +++++----- tests/components/switcher_kis/test_cover.py | 12 +++---- tests/components/switcher_kis/test_light.py | 8 ++--- .../components/switcher_kis/test_services.py | 26 +++++++------- tests/components/switcher_kis/test_switch.py | 36 ++++++++++--------- 11 files changed, 91 insertions(+), 156 deletions(-) diff --git a/homeassistant/components/switcher_kis/cover.py b/homeassistant/components/switcher_kis/cover.py index 7d3ec0e4af0..513b786a033 100644 --- a/homeassistant/components/switcher_kis/cover.py +++ b/homeassistant/components/switcher_kis/cover.py @@ -2,10 +2,8 @@ from __future__ import annotations -import logging from typing import Any, cast -from aioswitcher.api import SwitcherApi, SwitcherBaseResponse from aioswitcher.device import DeviceCategory, ShutterDirection, SwitcherShutter from homeassistant.components.cover import ( @@ -16,7 +14,6 @@ from homeassistant.components.cover import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -24,8 +21,6 @@ from .const import SIGNAL_DEVICE_ADD from .coordinator import SwitcherDataUpdateCoordinator from .entity import SwitcherEntity -_LOGGER = logging.getLogger(__name__) - API_SET_POSITON = "set_position" API_STOP = "stop_shutter" @@ -92,32 +87,6 @@ class SwitcherBaseCoverEntity(SwitcherEntity, CoverEntity): data.direction[self._cover_id] == ShutterDirection.SHUTTER_UP ) - async def _async_call_api(self, api: str, *args: Any) -> None: - """Call Switcher API.""" - 
_LOGGER.debug("Calling api for %s, api: '%s', args: %s", self.name, api, args) - response: SwitcherBaseResponse | None = None - error = None - - try: - async with SwitcherApi( - self.coordinator.data.device_type, - self.coordinator.data.ip_address, - self.coordinator.data.device_id, - self.coordinator.data.device_key, - self.coordinator.token, - ) as swapi: - response = await getattr(swapi, api)(*args) - except (TimeoutError, OSError, RuntimeError) as err: - error = repr(err) - - if error or not response or not response.successful: - self.coordinator.last_update_success = False - self.async_write_ha_state() - raise HomeAssistantError( - f"Call api for {self.name} failed, api: '{api}', " - f"args: {args}, response/error: {response or error}" - ) - async def async_close_cover(self, **kwargs: Any) -> None: """Close cover.""" await self._async_call_api(API_SET_POSITON, 0, self._cover_id) diff --git a/homeassistant/components/switcher_kis/entity.py b/homeassistant/components/switcher_kis/entity.py index 12bde521377..e24f59a4a1c 100644 --- a/homeassistant/components/switcher_kis/entity.py +++ b/homeassistant/components/switcher_kis/entity.py @@ -1,11 +1,19 @@ """Base class for Switcher entities.""" +import logging +from typing import Any + +from aioswitcher.api import SwitcherApi, SwitcherBaseResponse + +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity from .coordinator import SwitcherDataUpdateCoordinator +_LOGGER = logging.getLogger(__name__) + class SwitcherEntity(CoordinatorEntity[SwitcherDataUpdateCoordinator]): """Base class for Switcher entities.""" @@ -18,3 +26,29 @@ class SwitcherEntity(CoordinatorEntity[SwitcherDataUpdateCoordinator]): self._attr_device_info = DeviceInfo( connections={(dr.CONNECTION_NETWORK_MAC, coordinator.mac_address)} ) + + async def 
_async_call_api(self, api: str, *args: Any) -> None: + """Call Switcher API.""" + _LOGGER.debug("Calling api for %s, api: '%s', args: %s", self.name, api, args) + response: SwitcherBaseResponse | None = None + error = None + + try: + async with SwitcherApi( + self.coordinator.data.device_type, + self.coordinator.data.ip_address, + self.coordinator.data.device_id, + self.coordinator.data.device_key, + self.coordinator.token, + ) as swapi: + response = await getattr(swapi, api)(*args) + except (TimeoutError, OSError, RuntimeError) as err: + error = repr(err) + + if error or not response or not response.successful: + self.coordinator.last_update_success = False + self.async_write_ha_state() + raise HomeAssistantError( + f"Call api for {self.name} failed, api: '{api}', " + f"args: {args}, response/error: {response or error}" + ) diff --git a/homeassistant/components/switcher_kis/light.py b/homeassistant/components/switcher_kis/light.py index b2ee624dbc5..75156044efa 100644 --- a/homeassistant/components/switcher_kis/light.py +++ b/homeassistant/components/switcher_kis/light.py @@ -2,16 +2,13 @@ from __future__ import annotations -import logging from typing import Any, cast -from aioswitcher.api import SwitcherApi, SwitcherBaseResponse from aioswitcher.device import DeviceCategory, DeviceState, SwitcherLight from homeassistant.components.light import ColorMode, LightEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -19,8 +16,6 @@ from .const import SIGNAL_DEVICE_ADD from .coordinator import SwitcherDataUpdateCoordinator from .entity import SwitcherEntity -_LOGGER = logging.getLogger(__name__) - API_SET_LIGHT = "set_light" @@ -79,32 +74,6 @@ class SwitcherBaseLightEntity(SwitcherEntity, LightEntity): data = 
cast(SwitcherLight, self.coordinator.data) return bool(data.light[self._light_id] == DeviceState.ON) - async def _async_call_api(self, api: str, *args: Any) -> None: - """Call Switcher API.""" - _LOGGER.debug("Calling api for %s, api: '%s', args: %s", self.name, api, args) - response: SwitcherBaseResponse | None = None - error = None - - try: - async with SwitcherApi( - self.coordinator.data.device_type, - self.coordinator.data.ip_address, - self.coordinator.data.device_id, - self.coordinator.data.device_key, - self.coordinator.token, - ) as swapi: - response = await getattr(swapi, api)(*args) - except (TimeoutError, OSError, RuntimeError) as err: - error = repr(err) - - if error or not response or not response.successful: - self.coordinator.last_update_success = False - self.async_write_ha_state() - raise HomeAssistantError( - f"Call api for {self.name} failed, api: '{api}', " - f"args: {args}, response/error: {response or error}" - ) - async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" await self._async_call_api(API_SET_LIGHT, DeviceState.ON, self._light_id) diff --git a/homeassistant/components/switcher_kis/switch.py b/homeassistant/components/switcher_kis/switch.py index 7d14620c1aa..ba0a99b4089 100644 --- a/homeassistant/components/switcher_kis/switch.py +++ b/homeassistant/components/switcher_kis/switch.py @@ -6,7 +6,7 @@ from datetime import timedelta import logging from typing import Any -from aioswitcher.api import Command, SwitcherApi, SwitcherBaseResponse +from aioswitcher.api import Command from aioswitcher.device import DeviceCategory, DeviceState import voluptuous as vol @@ -96,35 +96,6 @@ class SwitcherBaseSwitchEntity(SwitcherEntity, SwitchEntity): self.control_result = None self.async_write_ha_state() - async def _async_call_api(self, api: str, *args: Any) -> None: - """Call Switcher API.""" - _LOGGER.debug( - "Calling api for %s, api: '%s', args: %s", self.coordinator.name, api, args - ) - response: SwitcherBaseResponse 
| None = None - error = None - - try: - async with SwitcherApi( - self.coordinator.data.device_type, - self.coordinator.data.ip_address, - self.coordinator.data.device_id, - self.coordinator.data.device_key, - ) as swapi: - response = await getattr(swapi, api)(*args) - except (TimeoutError, OSError, RuntimeError) as err: - error = repr(err) - - if error or not response or not response.successful: - _LOGGER.error( - "Call api for %s failed, api: '%s', args: %s, response/error: %s", - self.coordinator.name, - api, - args, - response or error, - ) - self.coordinator.last_update_success = False - @property def is_on(self) -> bool: """Return True if entity is on.""" diff --git a/tests/components/switcher_kis/conftest.py b/tests/components/switcher_kis/conftest.py index 518c36616ee..58172a6962d 100644 --- a/tests/components/switcher_kis/conftest.py +++ b/tests/components/switcher_kis/conftest.py @@ -60,19 +60,11 @@ def mock_api(): patchers = [ patch( - "homeassistant.components.switcher_kis.switch.SwitcherApi.connect", + "homeassistant.components.switcher_kis.entity.SwitcherApi.connect", new=api_mock, ), patch( - "homeassistant.components.switcher_kis.switch.SwitcherApi.disconnect", - new=api_mock, - ), - patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.connect", - new=api_mock, - ), - patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.disconnect", + "homeassistant.components.switcher_kis.entity.SwitcherApi.disconnect", new=api_mock, ), ] diff --git a/tests/components/switcher_kis/test_button.py b/tests/components/switcher_kis/test_button.py index 50c015b4024..6ebd82363e4 100644 --- a/tests/components/switcher_kis/test_button.py +++ b/tests/components/switcher_kis/test_button.py @@ -42,7 +42,7 @@ async def test_assume_button( assert hass.states.get(SWING_OFF_EID) is None with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + 
"homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( BUTTON_DOMAIN, @@ -79,7 +79,7 @@ async def test_swing_button( assert hass.states.get(SWING_OFF_EID) is not None with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( BUTTON_DOMAIN, @@ -103,7 +103,7 @@ async def test_control_device_fail( # Test exception during set hvac mode with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", side_effect=RuntimeError("fake error"), ) as mock_control_device: with pytest.raises(HomeAssistantError): @@ -130,7 +130,7 @@ async def test_control_device_fail( # Test error response during turn on with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", return_value=SwitcherBaseResponse(None), ) as mock_control_device: with pytest.raises(HomeAssistantError): diff --git a/tests/components/switcher_kis/test_climate.py b/tests/components/switcher_kis/test_climate.py index 72e1a93d1c3..72a25d20d04 100644 --- a/tests/components/switcher_kis/test_climate.py +++ b/tests/components/switcher_kis/test_climate.py @@ -49,7 +49,7 @@ async def test_climate_hvac_mode( # Test set hvac mode heat with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -71,7 +71,7 @@ async def test_climate_hvac_mode( # Test set hvac mode off with patch( - 
"homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -108,7 +108,7 @@ async def test_climate_temperature( # Test set target temperature with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -128,7 +128,7 @@ async def test_climate_temperature( # Test set target temperature - incorrect params with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: with pytest.raises(ServiceValidationError): await hass.services.async_call( @@ -160,7 +160,7 @@ async def test_climate_fan_level( # Test set fan level to high with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -195,7 +195,7 @@ async def test_climate_swing( # Test set swing mode on with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -218,7 +218,7 @@ async def test_climate_swing( # Test set swing mode off with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -249,7 +249,7 @@ async def 
test_control_device_fail(hass: HomeAssistant, mock_bridge, mock_api) - # Test exception during set hvac mode with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", side_effect=RuntimeError("fake error"), ) as mock_control_device: with pytest.raises(HomeAssistantError): @@ -276,7 +276,7 @@ async def test_control_device_fail(hass: HomeAssistant, mock_bridge, mock_api) - # Test error response during turn on with patch( - "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_breeze_device", return_value=SwitcherBaseResponse(None), ) as mock_control_device: with pytest.raises(HomeAssistantError): diff --git a/tests/components/switcher_kis/test_cover.py b/tests/components/switcher_kis/test_cover.py index 2936cafdd53..5829d6345ef 100644 --- a/tests/components/switcher_kis/test_cover.py +++ b/tests/components/switcher_kis/test_cover.py @@ -115,7 +115,7 @@ async def test_cover( # Test set position with patch( - "homeassistant.components.switcher_kis.cover.SwitcherApi.set_position" + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_position" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, @@ -136,7 +136,7 @@ async def test_cover( # Test open with patch( - "homeassistant.components.switcher_kis.cover.SwitcherApi.set_position" + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_position" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, @@ -156,7 +156,7 @@ async def test_cover( # Test close with patch( - "homeassistant.components.switcher_kis.cover.SwitcherApi.set_position" + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_position" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, @@ -176,7 +176,7 @@ async def test_cover( # Test stop with patch( - 
"homeassistant.components.switcher_kis.cover.SwitcherApi.stop_shutter" + "homeassistant.components.switcher_kis.entity.SwitcherApi.stop_shutter" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, @@ -232,7 +232,7 @@ async def test_cover_control_fail( # Test exception during set position with patch( - "homeassistant.components.switcher_kis.cover.SwitcherApi.set_position", + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_position", side_effect=RuntimeError("fake error"), ) as mock_control_device: with pytest.raises(HomeAssistantError): @@ -257,7 +257,7 @@ async def test_cover_control_fail( # Test error response during set position with patch( - "homeassistant.components.switcher_kis.cover.SwitcherApi.set_position", + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_position", return_value=SwitcherBaseResponse(None), ) as mock_control_device: with pytest.raises(HomeAssistantError): diff --git a/tests/components/switcher_kis/test_light.py b/tests/components/switcher_kis/test_light.py index aa7d6551d75..51d0eb6332f 100644 --- a/tests/components/switcher_kis/test_light.py +++ b/tests/components/switcher_kis/test_light.py @@ -86,7 +86,7 @@ async def test_light( # Test turning on light with patch( - "homeassistant.components.switcher_kis.light.SwitcherApi.set_light", + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_light", ) as mock_set_light: await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True @@ -99,7 +99,7 @@ async def test_light( # Test turning off light with patch( - "homeassistant.components.switcher_kis.light.SwitcherApi.set_light" + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_light" ) as mock_set_light: await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True @@ -153,7 +153,7 @@ async def test_light_control_fail( # Test exception during turn on with patch( - 
"homeassistant.components.switcher_kis.cover.SwitcherApi.set_light", + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_light", side_effect=RuntimeError("fake error"), ) as mock_control_device: with pytest.raises(HomeAssistantError): @@ -178,7 +178,7 @@ async def test_light_control_fail( # Test error response during turn on with patch( - "homeassistant.components.switcher_kis.cover.SwitcherApi.set_light", + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_light", return_value=SwitcherBaseResponse(None), ) as mock_control_device: with pytest.raises(HomeAssistantError): diff --git a/tests/components/switcher_kis/test_services.py b/tests/components/switcher_kis/test_services.py index 65e1967cbac..b4a8168419f 100644 --- a/tests/components/switcher_kis/test_services.py +++ b/tests/components/switcher_kis/test_services.py @@ -16,6 +16,7 @@ from homeassistant.components.switcher_kis.const import ( ) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.config_validation import time_period_str from homeassistant.util import slugify @@ -48,7 +49,7 @@ async def test_turn_on_with_timer_service( assert state.state == STATE_OFF with patch( - "homeassistant.components.switcher_kis.switch.SwitcherApi.control_device" + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_device" ) as mock_control_device: await hass.services.async_call( DOMAIN, @@ -78,7 +79,7 @@ async def test_set_auto_off_service(hass: HomeAssistant, mock_bridge, mock_api) entity_id = f"{SWITCH_DOMAIN}.{slugify(device.name)}" with patch( - "homeassistant.components.switcher_kis.switch.SwitcherApi.set_auto_shutdown" + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_auto_shutdown" ) as mock_set_auto_shutdown: await hass.services.async_call( DOMAIN, @@ -95,7 +96,7 @@ async def 
test_set_auto_off_service(hass: HomeAssistant, mock_bridge, mock_api) @pytest.mark.parametrize("mock_bridge", [[DUMMY_WATER_HEATER_DEVICE]], indirect=True) async def test_set_auto_off_service_fail( - hass: HomeAssistant, mock_bridge, mock_api, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, mock_bridge, mock_api ) -> None: """Test set auto off service failed.""" await init_integration(hass) @@ -105,24 +106,21 @@ async def test_set_auto_off_service_fail( entity_id = f"{SWITCH_DOMAIN}.{slugify(device.name)}" with patch( - "homeassistant.components.switcher_kis.switch.SwitcherApi.set_auto_shutdown", + "homeassistant.components.switcher_kis.entity.SwitcherApi.set_auto_shutdown", return_value=None, ) as mock_set_auto_shutdown: - await hass.services.async_call( - DOMAIN, - SERVICE_SET_AUTO_OFF_NAME, - {ATTR_ENTITY_ID: entity_id, CONF_AUTO_OFF: DUMMY_AUTO_OFF_SET}, - blocking=True, - ) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + DOMAIN, + SERVICE_SET_AUTO_OFF_NAME, + {ATTR_ENTITY_ID: entity_id, CONF_AUTO_OFF: DUMMY_AUTO_OFF_SET}, + blocking=True, + ) assert mock_api.call_count == 2 mock_set_auto_shutdown.assert_called_once_with( time_period_str(DUMMY_AUTO_OFF_SET) ) - assert ( - f"Call api for {device.name} failed, api: 'set_auto_shutdown'" - in caplog.text - ) state = hass.states.get(entity_id) assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/switcher_kis/test_switch.py b/tests/components/switcher_kis/test_switch.py index 443c7bc930d..9bfe11fe202 100644 --- a/tests/components/switcher_kis/test_switch.py +++ b/tests/components/switcher_kis/test_switch.py @@ -16,6 +16,7 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.util import slugify from . 
import init_integration @@ -47,7 +48,7 @@ async def test_switch( # Test turning on with patch( - "homeassistant.components.switcher_kis.switch.SwitcherApi.control_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_device", ) as mock_control_device: await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True @@ -60,7 +61,7 @@ async def test_switch( # Test turning off with patch( - "homeassistant.components.switcher_kis.switch.SwitcherApi.control_device" + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_device" ) as mock_control_device: await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True @@ -78,7 +79,6 @@ async def test_switch_control_fail( mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch, - caplog: pytest.LogCaptureFixture, ) -> None: """Test switch control fail.""" await init_integration(hass) @@ -97,18 +97,19 @@ async def test_switch_control_fail( # Test exception during turn on with patch( - "homeassistant.components.switcher_kis.switch.SwitcherApi.control_device", + "homeassistant.components.switcher_kis.entity.SwitcherApi.control_device", side_effect=RuntimeError("fake error"), ) as mock_control_device: - await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True - ) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) assert mock_api.call_count == 2 mock_control_device.assert_called_once_with(Command.ON) - assert ( - f"Call api for {device.name} failed, api: 'control_device'" in caplog.text - ) state = hass.states.get(entity_id) assert state.state == STATE_UNAVAILABLE @@ -121,17 +122,18 @@ async def test_switch_control_fail( # Test error response during turn on with patch( - "homeassistant.components.switcher_kis.switch.SwitcherApi.control_device", + 
"homeassistant.components.switcher_kis.entity.SwitcherApi.control_device", return_value=SwitcherBaseResponse(None), ) as mock_control_device: - await hass.services.async_call( - SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True - ) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) assert mock_api.call_count == 4 mock_control_device.assert_called_once_with(Command.ON) - assert ( - f"Call api for {device.name} failed, api: 'control_device'" in caplog.text - ) state = hass.states.get(entity_id) assert state.state == STATE_UNAVAILABLE From 79ecb4a87cfa935816886ea8a5dd6b684c594280 Mon Sep 17 00:00:00 2001 From: jb101010-2 <168106462+jb101010-2@users.noreply.github.com> Date: Sat, 14 Dec 2024 20:43:27 +0100 Subject: [PATCH 241/677] Suez_water: add removal instructions (#133206) --- homeassistant/components/suez_water/quality_scale.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/suez_water/quality_scale.yaml b/homeassistant/components/suez_water/quality_scale.yaml index 0980ee472eb..474340a1489 100644 --- a/homeassistant/components/suez_water/quality_scale.yaml +++ b/homeassistant/components/suez_water/quality_scale.yaml @@ -21,7 +21,7 @@ rules: common-modules: done docs-high-level-description: done docs-installation-instructions: done - docs-removal-instructions: todo + docs-removal-instructions: done docs-actions: status: exempt comment: no service action From 35d5a16a3ca35014e505ec5449e394c36a369a7f Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Sat, 14 Dec 2024 20:47:06 +0100 Subject: [PATCH 242/677] Bump pynecil to 2.1.0 (#133211) --- homeassistant/components/iron_os/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git 
a/homeassistant/components/iron_os/manifest.json b/homeassistant/components/iron_os/manifest.json index d85b8bf4707..982fae16cc4 100644 --- a/homeassistant/components/iron_os/manifest.json +++ b/homeassistant/components/iron_os/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/iron_os", "iot_class": "local_polling", "loggers": ["pynecil", "aiogithubapi"], - "requirements": ["pynecil==2.0.2", "aiogithubapi==24.6.0"] + "requirements": ["pynecil==2.1.0", "aiogithubapi==24.6.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 54e80820491..37248e33077 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2104,7 +2104,7 @@ pymsteams==0.1.12 pymysensors==0.24.0 # homeassistant.components.iron_os -pynecil==2.0.2 +pynecil==2.1.0 # homeassistant.components.netgear pynetgear==0.10.10 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d4c1efeda15..5187e004989 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1706,7 +1706,7 @@ pymonoprice==0.4 pymysensors==0.24.0 # homeassistant.components.iron_os -pynecil==2.0.2 +pynecil==2.1.0 # homeassistant.components.netgear pynetgear==0.10.10 From 4dc1405e9934fc6aaadbcef533876a4c7cfe3688 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Sat, 14 Dec 2024 20:51:30 +0100 Subject: [PATCH 243/677] Bump incomfort-client to v0.6.4 (#133205) --- homeassistant/components/incomfort/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/incomfort/manifest.json b/homeassistant/components/incomfort/manifest.json index 40c93012eef..f404f33b970 100644 --- a/homeassistant/components/incomfort/manifest.json +++ b/homeassistant/components/incomfort/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/incomfort", "iot_class": "local_polling", "loggers": ["incomfortclient"], - "requirements": 
["incomfort-client==0.6.3-1"] + "requirements": ["incomfort-client==0.6.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index 37248e33077..7fcc2db9e06 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1197,7 +1197,7 @@ ihcsdk==2.8.5 imgw_pib==1.0.6 # homeassistant.components.incomfort -incomfort-client==0.6.3-1 +incomfort-client==0.6.4 # homeassistant.components.influxdb influxdb-client==1.24.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5187e004989..c97aac88311 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1011,7 +1011,7 @@ ifaddr==0.2.0 imgw_pib==1.0.6 # homeassistant.components.incomfort -incomfort-client==0.6.3-1 +incomfort-client==0.6.4 # homeassistant.components.influxdb influxdb-client==1.24.0 From 74aa1a8f7e6a782e72995aa1b4e0a27eb3cbcb8d Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Sat, 14 Dec 2024 21:47:27 +0100 Subject: [PATCH 244/677] Update Fronius translations (#132876) * Remove exception translation that's handled by configflow errors dict * Remove entity name translations handled by device class * Add data_description for Fronius config flow * Remove unnecessary exception case * review suggestion --- .../components/fronius/config_flow.py | 7 +--- homeassistant/components/fronius/strings.json | 24 ++--------- tests/components/fronius/test_config_flow.py | 42 ++++++------------- 3 files changed, 18 insertions(+), 55 deletions(-) diff --git a/homeassistant/components/fronius/config_flow.py b/homeassistant/components/fronius/config_flow.py index 1d5a26984fa..53433e31233 100644 --- a/homeassistant/components/fronius/config_flow.py +++ b/homeassistant/components/fronius/config_flow.py @@ -52,14 +52,9 @@ async def validate_host( try: inverter_info = await fronius.inverter_info() first_inverter = next(inverter for inverter in inverter_info["inverters"]) - except FroniusError as err: + except (FroniusError, StopIteration) as err: _LOGGER.debug(err) raise 
CannotConnect from err - except StopIteration as err: - raise CannotConnect( - translation_domain=DOMAIN, - translation_key="no_supported_device_found", - ) from err first_inverter_uid: str = first_inverter["unique_id"]["value"] return first_inverter_uid, FroniusConfigEntryData( host=host, diff --git a/homeassistant/components/fronius/strings.json b/homeassistant/components/fronius/strings.json index 86348a0e2d7..9a2b498f28c 100644 --- a/homeassistant/components/fronius/strings.json +++ b/homeassistant/components/fronius/strings.json @@ -3,10 +3,12 @@ "flow_title": "{device}", "step": { "user": { - "title": "Fronius SolarNet", - "description": "Configure the IP address or local hostname of your Fronius device.", + "description": "Configure your Fronius SolarAPI device.", "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The IP address or hostname of your Fronius device." } }, "confirm_discovery": { @@ -41,9 +43,6 @@ "energy_total": { "name": "Total energy" }, - "frequency_ac": { - "name": "[%key:component::sensor::entity_component::frequency::name%]" - }, "current_ac": { "name": "AC current" }, @@ -156,9 +155,6 @@ "power_apparent_phase_3": { "name": "Apparent power phase 3" }, - "power_apparent": { - "name": "[%key:component::sensor::entity_component::apparent_power::name%]" - }, "power_factor_phase_1": { "name": "Power factor phase 1" }, @@ -168,9 +164,6 @@ "power_factor_phase_3": { "name": "Power factor phase 3" }, - "power_factor": { - "name": "[%key:component::sensor::entity_component::power_factor::name%]" - }, "power_reactive_phase_1": { "name": "Reactive power phase 1" }, @@ -216,12 +209,6 @@ "energy_real_ac_consumed": { "name": "Energy consumed" }, - "power_real_ac": { - "name": "[%key:component::sensor::entity_component::power::name%]" - }, - "temperature_channel_1": { - "name": "[%key:component::sensor::entity_component::temperature::name%]" - }, "state_code": { "name": "State code" }, @@ -296,9 +283,6 @@ 
} }, "exceptions": { - "no_supported_device_found": { - "message": "No supported Fronius SolarNet device found." - }, "entry_cannot_connect": { "message": "Failed to connect to Fronius device at {host}: {fronius_error}" }, diff --git a/tests/components/fronius/test_config_flow.py b/tests/components/fronius/test_config_flow.py index 1b9c41d5aa6..5d0b93e7cd5 100644 --- a/tests/components/fronius/test_config_flow.py +++ b/tests/components/fronius/test_config_flow.py @@ -118,8 +118,18 @@ async def test_form_with_inverter(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_cannot_connect(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + "inverter_side_effect", + [ + FroniusError, + None, # raises StopIteration through INVERTER_INFO_NONE + ], +) +async def test_form_cannot_connect( + hass: HomeAssistant, inverter_side_effect: type[FroniusError] | None +) -> None: """Test we handle cannot connect error.""" + INVERTER_INFO_NONE: dict[str, list] = {"inverters": []} result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -131,34 +141,8 @@ async def test_form_cannot_connect(hass: HomeAssistant) -> None: ), patch( "pyfronius.Fronius.inverter_info", - side_effect=FroniusError, - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": "1.1.1.1", - }, - ) - - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "cannot_connect"} - - -async def test_form_no_device(hass: HomeAssistant) -> None: - """Test we handle no device found error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with ( - patch( - "pyfronius.Fronius.current_logger_info", - side_effect=FroniusError, - ), - patch( - "pyfronius.Fronius.inverter_info", - return_value={"inverters": []}, + side_effect=inverter_side_effect, + return_value=INVERTER_INFO_NONE, ), 
): result2 = await hass.config_entries.flow.async_configure( From 2117e35d53b1cf397a149ee9f45f3089f94d4bb4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 14 Dec 2024 15:06:26 -0600 Subject: [PATCH 245/677] Bump yalexs-ble to 2.5.5 (#133229) changelog: https://github.com/bdraco/yalexs-ble/compare/v2.5.4...v2.5.5 --- homeassistant/components/august/manifest.json | 2 +- homeassistant/components/yale/manifest.json | 2 +- homeassistant/components/yalexs_ble/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index ed2c8007ee8..d0b41411c96 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.4"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.5"] } diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json index 2ed1f4b5c43..7b7edfac77b 100644 --- a/homeassistant/components/yale/manifest.json +++ b/homeassistant/components/yale/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale", "iot_class": "cloud_push", "loggers": ["socketio", "engineio", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.4"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.5"] } diff --git a/homeassistant/components/yalexs_ble/manifest.json b/homeassistant/components/yalexs_ble/manifest.json index 1472f9035ea..b2c331397b3 100644 --- a/homeassistant/components/yalexs_ble/manifest.json +++ b/homeassistant/components/yalexs_ble/manifest.json @@ -12,5 +12,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": 
"https://www.home-assistant.io/integrations/yalexs_ble", "iot_class": "local_push", - "requirements": ["yalexs-ble==2.5.4"] + "requirements": ["yalexs-ble==2.5.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index 7fcc2db9e06..4c257ba9c11 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3058,7 +3058,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.4 +yalexs-ble==2.5.5 # homeassistant.components.august # homeassistant.components.yale diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c97aac88311..5b33e7d3c12 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2453,7 +2453,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.4 +yalexs-ble==2.5.5 # homeassistant.components.august # homeassistant.components.yale From 229a68dc7321de4a43b96a71b15e11189dd7135d Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Sun, 15 Dec 2024 09:27:14 +0100 Subject: [PATCH 246/677] set PARALLEL_UPDATES to 1 for enphase_envoy (#132373) * set PARALLEL_UPDATES to 1 for enphase_envoy * move PARALLEL_UPDATES from _init_ to platform files. 
* Implement review feedback * set parrallel_update 0 for read-only platforms --- homeassistant/components/enphase_envoy/binary_sensor.py | 2 ++ homeassistant/components/enphase_envoy/number.py | 2 ++ homeassistant/components/enphase_envoy/select.py | 2 ++ homeassistant/components/enphase_envoy/sensor.py | 2 ++ homeassistant/components/enphase_envoy/switch.py | 2 ++ 5 files changed, 10 insertions(+) diff --git a/homeassistant/components/enphase_envoy/binary_sensor.py b/homeassistant/components/enphase_envoy/binary_sensor.py index 6be29d19ecb..1ad6f259de1 100644 --- a/homeassistant/components/enphase_envoy/binary_sensor.py +++ b/homeassistant/components/enphase_envoy/binary_sensor.py @@ -22,6 +22,8 @@ from .const import DOMAIN from .coordinator import EnphaseConfigEntry, EnphaseUpdateCoordinator from .entity import EnvoyBaseEntity +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class EnvoyEnchargeBinarySensorEntityDescription(BinarySensorEntityDescription): diff --git a/homeassistant/components/enphase_envoy/number.py b/homeassistant/components/enphase_envoy/number.py index f27335b1f4c..a62913a4c0b 100644 --- a/homeassistant/components/enphase_envoy/number.py +++ b/homeassistant/components/enphase_envoy/number.py @@ -25,6 +25,8 @@ from .const import DOMAIN from .coordinator import EnphaseConfigEntry, EnphaseUpdateCoordinator from .entity import EnvoyBaseEntity +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class EnvoyRelayNumberEntityDescription(NumberEntityDescription): diff --git a/homeassistant/components/enphase_envoy/select.py b/homeassistant/components/enphase_envoy/select.py index 903c2c1edf6..d9729a16683 100644 --- a/homeassistant/components/enphase_envoy/select.py +++ b/homeassistant/components/enphase_envoy/select.py @@ -20,6 +20,8 @@ from .const import DOMAIN from .coordinator import EnphaseConfigEntry, EnphaseUpdateCoordinator from .entity import EnvoyBaseEntity +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) 
class EnvoyRelaySelectEntityDescription(SelectEntityDescription): diff --git a/homeassistant/components/enphase_envoy/sensor.py b/homeassistant/components/enphase_envoy/sensor.py index 20d610e4b71..fadbf191840 100644 --- a/homeassistant/components/enphase_envoy/sensor.py +++ b/homeassistant/components/enphase_envoy/sensor.py @@ -59,6 +59,8 @@ _LOGGER = logging.getLogger(__name__) INVERTERS_KEY = "inverters" LAST_REPORTED_KEY = "last_reported" +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class EnvoyInverterSensorEntityDescription(SensorEntityDescription): diff --git a/homeassistant/components/enphase_envoy/switch.py b/homeassistant/components/enphase_envoy/switch.py index 14451aaf266..5170b694587 100644 --- a/homeassistant/components/enphase_envoy/switch.py +++ b/homeassistant/components/enphase_envoy/switch.py @@ -20,6 +20,8 @@ from .const import DOMAIN from .coordinator import EnphaseConfigEntry, EnphaseUpdateCoordinator from .entity import EnvoyBaseEntity +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class EnvoyEnpowerSwitchEntityDescription(SwitchEntityDescription): From 1b2cf68e8277bbcc6296a436fca3d79025b38cec Mon Sep 17 00:00:00 2001 From: jb101010-2 <168106462+jb101010-2@users.noreply.github.com> Date: Sun, 15 Dec 2024 09:46:14 +0100 Subject: [PATCH 247/677] Suez_water: store coordinator in runtime_data (#133204) * Suez_water: store coordinator in runtime_data * jhfg --- homeassistant/components/suez_water/__init__.py | 15 +++++---------- .../components/suez_water/coordinator.py | 7 +++++-- .../components/suez_water/quality_scale.yaml | 4 +--- homeassistant/components/suez_water/sensor.py | 7 +++---- 4 files changed, 14 insertions(+), 19 deletions(-) diff --git a/homeassistant/components/suez_water/__init__.py b/homeassistant/components/suez_water/__init__.py index 06f503b85c2..cbaac912642 100644 --- a/homeassistant/components/suez_water/__init__.py +++ b/homeassistant/components/suez_water/__init__.py @@ -2,32 +2,27 @@ from 
__future__ import annotations -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import SuezWaterCoordinator +from .coordinator import SuezWaterConfigEntry, SuezWaterCoordinator PLATFORMS: list[Platform] = [Platform.SENSOR] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: SuezWaterConfigEntry) -> bool: """Set up Suez Water from a config entry.""" coordinator = SuezWaterCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: SuezWaterConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/suez_water/coordinator.py b/homeassistant/components/suez_water/coordinator.py index 224929c606e..72da68c0f5d 100644 --- a/homeassistant/components/suez_water/coordinator.py +++ b/homeassistant/components/suez_water/coordinator.py @@ -37,13 +37,16 @@ class SuezWaterData: price: float +type SuezWaterConfigEntry = ConfigEntry[SuezWaterCoordinator] + + class SuezWaterCoordinator(DataUpdateCoordinator[SuezWaterData]): """Suez water coordinator.""" _suez_client: SuezClient - config_entry: ConfigEntry + config_entry: SuezWaterConfigEntry - def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None: + def __init__(self, hass: HomeAssistant, config_entry: 
SuezWaterConfigEntry) -> None: """Initialize suez water coordinator.""" super().__init__( hass, diff --git a/homeassistant/components/suez_water/quality_scale.yaml b/homeassistant/components/suez_water/quality_scale.yaml index 474340a1489..399c0b73a5a 100644 --- a/homeassistant/components/suez_water/quality_scale.yaml +++ b/homeassistant/components/suez_water/quality_scale.yaml @@ -4,9 +4,7 @@ rules: test-before-configure: done unique-config-entry: done config-flow-test-coverage: done - runtime-data: - status: todo - comment: coordinator is created during setup, should be stored in runtime_data + runtime-data: done test-before-setup: done appropriate-polling: done entity-unique-id: done diff --git a/homeassistant/components/suez_water/sensor.py b/homeassistant/components/suez_water/sensor.py index 2ba699a9af1..e4e53dd7f6d 100644 --- a/homeassistant/components/suez_water/sensor.py +++ b/homeassistant/components/suez_water/sensor.py @@ -13,7 +13,6 @@ from homeassistant.components.sensor import ( SensorEntity, SensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CURRENCY_EURO, UnitOfVolume from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo @@ -21,7 +20,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import CONF_COUNTER_ID, DOMAIN -from .coordinator import SuezWaterCoordinator, SuezWaterData +from .coordinator import SuezWaterConfigEntry, SuezWaterCoordinator, SuezWaterData @dataclass(frozen=True, kw_only=True) @@ -53,11 +52,11 @@ SENSORS: tuple[SuezWaterSensorEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: SuezWaterConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Suez Water sensor from a config entry.""" - coordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data counter_id = entry.data[CONF_COUNTER_ID] async_add_entities( From 94941283955c88e34253256332628e9ea2754d18 Mon Sep 17 00:00:00 2001 From: Avi Miller Date: Sun, 15 Dec 2024 20:24:41 +1100 Subject: [PATCH 248/677] Bump aiolifx to 1.1.2 and add new HomeKit product prefixes (#133191) Signed-off-by: Avi Miller --- homeassistant/components/lifx/manifest.json | 5 ++++- homeassistant/generated/zeroconf.py | 12 ++++++++++++ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 18 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lifx/manifest.json b/homeassistant/components/lifx/manifest.json index c7d8a27a1c7..2e16eb2082b 100644 --- a/homeassistant/components/lifx/manifest.json +++ b/homeassistant/components/lifx/manifest.json @@ -23,6 +23,7 @@ "LIFX Ceiling", "LIFX Clean", "LIFX Color", + "LIFX Colour", "LIFX DLCOL", "LIFX Dlight", "LIFX DLWW", @@ -35,12 +36,14 @@ "LIFX Neon", "LIFX Nightvision", "LIFX PAR38", + "LIFX Permanent Outdoor", "LIFX Pls", "LIFX Plus", "LIFX Round", "LIFX Square", "LIFX String", "LIFX Tile", + "LIFX Tube", "LIFX White", "LIFX Z" ] @@ -48,7 +51,7 @@ "iot_class": "local_polling", "loggers": ["aiolifx", "aiolifx_effects", "bitstring"], "requirements": [ - "aiolifx==1.1.1", + "aiolifx==1.1.2", "aiolifx-effects==0.3.2", "aiolifx-themes==0.5.5" ] diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index e5b50841d11..2c914c2d240 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -92,6 +92,10 @@ HOMEKIT = { "always_discover": True, "domain": "lifx", }, + "LIFX Colour": { + "always_discover": True, + "domain": "lifx", + }, "LIFX DLCOL": { 
"always_discover": True, "domain": "lifx", @@ -140,6 +144,10 @@ HOMEKIT = { "always_discover": True, "domain": "lifx", }, + "LIFX Permanent Outdoor": { + "always_discover": True, + "domain": "lifx", + }, "LIFX Pls": { "always_discover": True, "domain": "lifx", @@ -164,6 +172,10 @@ HOMEKIT = { "always_discover": True, "domain": "lifx", }, + "LIFX Tube": { + "always_discover": True, + "domain": "lifx", + }, "LIFX White": { "always_discover": True, "domain": "lifx", diff --git a/requirements_all.txt b/requirements_all.txt index 4c257ba9c11..f0b050b49ea 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -286,7 +286,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.5.5 # homeassistant.components.lifx -aiolifx==1.1.1 +aiolifx==1.1.2 # homeassistant.components.lookin aiolookin==1.0.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5b33e7d3c12..7b9fafb5958 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -268,7 +268,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.5.5 # homeassistant.components.lifx -aiolifx==1.1.1 +aiolifx==1.1.2 # homeassistant.components.lookin aiolookin==1.0.0 From af6948a9112575ff6cf4b9a8d26aaff29cc124e7 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Sun, 15 Dec 2024 10:34:33 +0100 Subject: [PATCH 249/677] Fix pydantic warnings in purpleair (#133247) --- homeassistant/components/purpleair/diagnostics.py | 2 +- tests/components/purpleair/conftest.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/purpleair/diagnostics.py b/homeassistant/components/purpleair/diagnostics.py index 30f1deeb368..f7c44b7e9b2 100644 --- a/homeassistant/components/purpleair/diagnostics.py +++ b/homeassistant/components/purpleair/diagnostics.py @@ -37,7 +37,7 @@ async def async_get_config_entry_diagnostics( return async_redact_data( { "entry": entry.as_dict(), - "data": coordinator.data.dict(), # type: ignore[deprecated] + "data": 
coordinator.data.model_dump(), }, TO_REDACT, ) diff --git a/tests/components/purpleair/conftest.py b/tests/components/purpleair/conftest.py index 3d6776dd12e..1809b16bd75 100644 --- a/tests/components/purpleair/conftest.py +++ b/tests/components/purpleair/conftest.py @@ -73,7 +73,7 @@ def config_entry_options_fixture() -> dict[str, Any]: @pytest.fixture(name="get_sensors_response", scope="package") def get_sensors_response_fixture() -> GetSensorsResponse: """Define a fixture to mock an aiopurpleair GetSensorsResponse object.""" - return GetSensorsResponse.parse_raw( + return GetSensorsResponse.model_validate_json( load_fixture("get_sensors_response.json", "purpleair") ) From 80e4d7ee12ea8d8052ed6993adb334f427453a9a Mon Sep 17 00:00:00 2001 From: rappenze Date: Sun, 15 Dec 2024 11:02:26 +0100 Subject: [PATCH 250/677] Fix fibaro climate hvac mode (#132508) --- homeassistant/components/fibaro/climate.py | 6 +- tests/components/fibaro/conftest.py | 56 +++++++++ tests/components/fibaro/test_climate.py | 134 +++++++++++++++++++++ 3 files changed, 193 insertions(+), 3 deletions(-) create mode 100644 tests/components/fibaro/test_climate.py diff --git a/homeassistant/components/fibaro/climate.py b/homeassistant/components/fibaro/climate.py index 2541781773c..d5605e71c73 100644 --- a/homeassistant/components/fibaro/climate.py +++ b/homeassistant/components/fibaro/climate.py @@ -272,7 +272,9 @@ class FibaroThermostat(FibaroEntity, ClimateEntity): if isinstance(fibaro_operation_mode, str): with suppress(ValueError): return HVACMode(fibaro_operation_mode.lower()) - elif fibaro_operation_mode in OPMODES_HVAC: + # when the mode cannot be instantiated a preset_mode is selected + return HVACMode.AUTO + if fibaro_operation_mode in OPMODES_HVAC: return OPMODES_HVAC[fibaro_operation_mode] return None @@ -280,8 +282,6 @@ class FibaroThermostat(FibaroEntity, ClimateEntity): """Set new target operation mode.""" if not self._op_mode_device: return - if self.preset_mode: - return if 
"setOperatingMode" in self._op_mode_device.fibaro_device.actions: self._op_mode_device.action("setOperatingMode", HA_OPMODES_HVAC[hvac_mode]) diff --git a/tests/components/fibaro/conftest.py b/tests/components/fibaro/conftest.py index 1976a8f310b..583c44a41e6 100644 --- a/tests/components/fibaro/conftest.py +++ b/tests/components/fibaro/conftest.py @@ -129,6 +129,62 @@ def mock_light() -> Mock: return light +@pytest.fixture +def mock_thermostat() -> Mock: + """Fixture for a thermostat.""" + climate = Mock() + climate.fibaro_id = 4 + climate.parent_fibaro_id = 0 + climate.name = "Test climate" + climate.room_id = 1 + climate.dead = False + climate.visible = True + climate.enabled = True + climate.type = "com.fibaro.thermostatDanfoss" + climate.base_type = "com.fibaro.device" + climate.properties = {"manufacturer": ""} + climate.actions = {"setThermostatMode": 1} + climate.supported_features = {} + climate.has_supported_thermostat_modes = True + climate.supported_thermostat_modes = ["Off", "Heat", "CustomerSpecific"] + climate.has_operating_mode = False + climate.has_thermostat_mode = True + climate.thermostat_mode = "CustomerSpecific" + value_mock = Mock() + value_mock.has_value = True + value_mock.int_value.return_value = 20 + climate.value = value_mock + return climate + + +@pytest.fixture +def mock_thermostat_with_operating_mode() -> Mock: + """Fixture for a thermostat.""" + climate = Mock() + climate.fibaro_id = 4 + climate.parent_fibaro_id = 0 + climate.name = "Test climate" + climate.room_id = 1 + climate.dead = False + climate.visible = True + climate.enabled = True + climate.type = "com.fibaro.thermostatDanfoss" + climate.base_type = "com.fibaro.device" + climate.properties = {"manufacturer": ""} + climate.actions = {"setOperationMode": 1} + climate.supported_features = {} + climate.has_supported_operating_modes = True + climate.supported_operating_modes = [0, 1, 15] + climate.has_operating_mode = True + climate.operating_mode = 15 + 
climate.has_thermostat_mode = False + value_mock = Mock() + value_mock.has_value = True + value_mock.int_value.return_value = 20 + climate.value = value_mock + return climate + + @pytest.fixture def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Return the default mocked config entry.""" diff --git a/tests/components/fibaro/test_climate.py b/tests/components/fibaro/test_climate.py new file mode 100644 index 00000000000..31022e19a08 --- /dev/null +++ b/tests/components/fibaro/test_climate.py @@ -0,0 +1,134 @@ +"""Test the Fibaro climate platform.""" + +from unittest.mock import Mock, patch + +from homeassistant.components.climate import ClimateEntityFeature, HVACMode +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import init_integration + +from tests.common import MockConfigEntry + + +async def test_climate_setup( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_thermostat: Mock, + mock_room: Mock, +) -> None: + """Test that the climate creates an entity.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + entry = entity_registry.async_get("climate.room_1_test_climate_4") + assert entry + assert entry.unique_id == "hc2_111111.4" + assert entry.original_name == "Room 1 Test climate" + assert entry.supported_features == ( + ClimateEntityFeature.TURN_ON + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.PRESET_MODE + ) + + +async def test_hvac_mode_preset( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_thermostat: Mock, + mock_room: Mock, +) -> None: + """Test 
that the climate state is auto when a preset is selected.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + state = hass.states.get("climate.room_1_test_climate_4") + assert state.state == HVACMode.AUTO + assert state.attributes["preset_mode"] == "CustomerSpecific" + + +async def test_hvac_mode_heat( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_thermostat: Mock, + mock_room: Mock, +) -> None: + """Test that the preset mode is None if a hvac mode is active.""" + + # Arrange + mock_thermostat.thermostat_mode = "Heat" + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + state = hass.states.get("climate.room_1_test_climate_4") + assert state.state == HVACMode.HEAT + assert state.attributes["preset_mode"] is None + + +async def test_set_hvac_mode( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_thermostat: Mock, + mock_room: Mock, +) -> None: + """Test that set_hvac_mode() works.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + await hass.services.async_call( + "climate", + "set_hvac_mode", + {"entity_id": "climate.room_1_test_climate_4", "hvac_mode": HVACMode.HEAT}, + blocking=True, + ) + + # Assert + mock_thermostat.execute_action.assert_called_once() + + +async def 
test_hvac_mode_with_operation_mode_support( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_thermostat_with_operating_mode: Mock, + mock_room: Mock, +) -> None: + """Test that operating mode works.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat_with_operating_mode] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + state = hass.states.get("climate.room_1_test_climate_4") + assert state.state == HVACMode.AUTO From f8da2c3e5c98d98fd1c55b978d3b259ba45e5e0f Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Sun, 15 Dec 2024 11:04:11 +0100 Subject: [PATCH 251/677] Bump aioautomower to 2024.12.0 (#132962) --- homeassistant/components/husqvarna_automower/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../husqvarna_automower/snapshots/test_diagnostics.ambr | 7 ------- 4 files changed, 3 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/husqvarna_automower/manifest.json b/homeassistant/components/husqvarna_automower/manifest.json index 0f35e60c219..02e87a3a772 100644 --- a/homeassistant/components/husqvarna_automower/manifest.json +++ b/homeassistant/components/husqvarna_automower/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_push", "loggers": ["aioautomower"], "quality_scale": "silver", - "requirements": ["aioautomower==2024.10.3"] + "requirements": ["aioautomower==2024.12.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index f0b050b49ea..237b57a1438 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -201,7 +201,7 @@ aioaseko==1.0.0 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.10.3 +aioautomower==2024.12.0 # homeassistant.components.azure_devops 
aioazuredevops==2.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7b9fafb5958..613f9793cf3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -189,7 +189,7 @@ aioaseko==1.0.0 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.10.3 +aioautomower==2024.12.0 # homeassistant.components.azure_devops aioazuredevops==2.2.1 diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index ce9fc9ac01a..2dab82451a6 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -71,9 +71,7 @@ 'activity': 'parked_in_cs', 'error_code': 0, 'error_datetime': None, - 'error_datetime_naive': None, 'error_key': None, - 'error_timestamp': 0, 'inactive_reason': 'none', 'is_error_confirmable': False, 'mode': 'main_area', @@ -82,9 +80,7 @@ 'work_area_name': 'Front lawn', }), 'planner': dict({ - 'next_start': 1685991600000, 'next_start_datetime': '2023-06-05T19:00:00+02:00', - 'next_start_datetime_naive': '2023-06-05T19:00:00', 'override': dict({ 'action': 'not_active', }), @@ -141,7 +137,6 @@ 'cutting_height': 50, 'enabled': False, 'last_time_completed': '2024-08-12T05:07:49+02:00', - 'last_time_completed_naive': '2024-08-12T05:07:49', 'name': 'my_lawn', 'progress': 20, }), @@ -149,7 +144,6 @@ 'cutting_height': 50, 'enabled': True, 'last_time_completed': '2024-08-12T07:54:29+02:00', - 'last_time_completed_naive': '2024-08-12T07:54:29', 'name': 'Front lawn', 'progress': 40, }), @@ -157,7 +151,6 @@ 'cutting_height': 25, 'enabled': True, 'last_time_completed': None, - 'last_time_completed_naive': None, 'name': 'Back lawn', 'progress': None, }), From 412aa60e8f294833ec48199bf04e9f77399aed61 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Sun, 15 Dec 2024 11:05:17 +0100 Subject: 
[PATCH 252/677] Fix enigma2 integration for devices not reporting MAC address (#133226) --- .../components/enigma2/config_flow.py | 3 +- .../components/enigma2/coordinator.py | 29 +++++++++++------ .../components/enigma2/media_player.py | 7 +--- tests/components/enigma2/test_init.py | 32 +++++++++++++------ 4 files changed, 45 insertions(+), 26 deletions(-) diff --git a/homeassistant/components/enigma2/config_flow.py b/homeassistant/components/enigma2/config_flow.py index e9502a0f7cd..b0649a8368d 100644 --- a/homeassistant/components/enigma2/config_flow.py +++ b/homeassistant/components/enigma2/config_flow.py @@ -133,7 +133,8 @@ class Enigma2ConfigFlowHandler(ConfigFlow, domain=DOMAIN): except Exception: # noqa: BLE001 errors = {"base": "unknown"} else: - await self.async_set_unique_id(about["info"]["ifaces"][0]["mac"]) + unique_id = about["info"]["ifaces"][0]["mac"] or self.unique_id + await self.async_set_unique_id(unique_id) self._abort_if_unique_id_configured() return errors diff --git a/homeassistant/components/enigma2/coordinator.py b/homeassistant/components/enigma2/coordinator.py index a35e74f582f..d5bbf2c0ce5 100644 --- a/homeassistant/components/enigma2/coordinator.py +++ b/homeassistant/components/enigma2/coordinator.py @@ -35,6 +35,7 @@ class Enigma2UpdateCoordinator(DataUpdateCoordinator[OpenWebIfStatus]): """The Enigma2 data update coordinator.""" device: OpenWebIfDevice + unique_id: str | None def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None: """Initialize the Enigma2 data update coordinator.""" @@ -64,6 +65,10 @@ class Enigma2UpdateCoordinator(DataUpdateCoordinator[OpenWebIfStatus]): name=config_entry.data[CONF_HOST], ) + # set the unique ID for the entities to the config entry unique ID + # for devices that don't report a MAC address + self.unique_id = config_entry.unique_id + async def _async_setup(self) -> None: """Provide needed data to the device info.""" @@ -71,16 +76,20 @@ class 
Enigma2UpdateCoordinator(DataUpdateCoordinator[OpenWebIfStatus]): self.device.mac_address = about["info"]["ifaces"][0]["mac"] self.device_info["model"] = about["info"]["model"] self.device_info["manufacturer"] = about["info"]["brand"] - self.device_info[ATTR_IDENTIFIERS] = { - (DOMAIN, format_mac(iface["mac"])) - for iface in about["info"]["ifaces"] - if "mac" in iface and iface["mac"] is not None - } - self.device_info[ATTR_CONNECTIONS] = { - (CONNECTION_NETWORK_MAC, format_mac(iface["mac"])) - for iface in about["info"]["ifaces"] - if "mac" in iface and iface["mac"] is not None - } + if self.device.mac_address is not None: + self.device_info[ATTR_IDENTIFIERS] = { + (DOMAIN, format_mac(iface["mac"])) + for iface in about["info"]["ifaces"] + if "mac" in iface and iface["mac"] is not None + } + self.device_info[ATTR_CONNECTIONS] = { + (CONNECTION_NETWORK_MAC, format_mac(iface["mac"])) + for iface in about["info"]["ifaces"] + if "mac" in iface and iface["mac"] is not None + } + self.unique_id = self.device.mac_address + elif self.unique_id is not None: + self.device_info[ATTR_IDENTIFIERS] = {(DOMAIN, self.unique_id)} async def _async_update_data(self) -> OpenWebIfStatus: await self.device.update() diff --git a/homeassistant/components/enigma2/media_player.py b/homeassistant/components/enigma2/media_player.py index 8287e055814..ee0de15c3fb 100644 --- a/homeassistant/components/enigma2/media_player.py +++ b/homeassistant/components/enigma2/media_player.py @@ -4,7 +4,6 @@ from __future__ import annotations import contextlib from logging import getLogger -from typing import cast from aiohttp.client_exceptions import ServerDisconnectedError from openwebif.enums import PowerState, RemoteControlCodes, SetVolumeOption @@ -15,7 +14,6 @@ from homeassistant.components.media_player import ( MediaPlayerState, MediaType, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import 
AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -65,10 +63,7 @@ class Enigma2Device(CoordinatorEntity[Enigma2UpdateCoordinator], MediaPlayerEnti super().__init__(coordinator) - self._attr_unique_id = ( - coordinator.device.mac_address - or cast(ConfigEntry, coordinator.config_entry).entry_id - ) + self._attr_unique_id = coordinator.unique_id self._attr_device_info = coordinator.device_info diff --git a/tests/components/enigma2/test_init.py b/tests/components/enigma2/test_init.py index ab19c2ce51a..d12f96d4b0f 100644 --- a/tests/components/enigma2/test_init.py +++ b/tests/components/enigma2/test_init.py @@ -5,23 +5,37 @@ from unittest.mock import patch from homeassistant.components.enigma2.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr from .conftest import TEST_REQUIRED, MockDevice from tests.common import MockConfigEntry +async def test_device_without_mac_address( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: + """Test that a device gets successfully registered when the device doesn't report a MAC address.""" + mock_device = MockDevice() + mock_device.mac_address = None + with patch( + "homeassistant.components.enigma2.coordinator.OpenWebIfDevice.__new__", + return_value=mock_device, + ): + entry = MockConfigEntry( + domain=DOMAIN, data=TEST_REQUIRED, title="name", unique_id="123456" + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert device_registry.async_get_device({(DOMAIN, entry.unique_id)}) is not None + + async def test_unload_entry(hass: HomeAssistant) -> None: """Test successful unload of entry.""" - with ( - patch( - "homeassistant.components.enigma2.coordinator.OpenWebIfDevice.__new__", - return_value=MockDevice(), - ), - patch( - 
"homeassistant.components.enigma2.media_player.async_setup_entry", - return_value=True, - ), + with patch( + "homeassistant.components.enigma2.coordinator.OpenWebIfDevice.__new__", + return_value=MockDevice(), ): entry = MockConfigEntry(domain=DOMAIN, data=TEST_REQUIRED, title="name") entry.add_to_hass(hass) From 879d809e5a0f1dd827c5e91f91e991b716937ab4 Mon Sep 17 00:00:00 2001 From: rappenze Date: Sun, 15 Dec 2024 11:47:18 +0100 Subject: [PATCH 253/677] Enhance translation strings in fibaro (#133234) --- homeassistant/components/fibaro/strings.json | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/fibaro/strings.json b/homeassistant/components/fibaro/strings.json index de875176cdb..99f718d545c 100644 --- a/homeassistant/components/fibaro/strings.json +++ b/homeassistant/components/fibaro/strings.json @@ -3,16 +3,25 @@ "step": { "user": { "data": { - "url": "URL in the format http://HOST/api/", + "url": "[%key:common::config_flow::data::url%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "import_plugins": "Import entities from fibaro plugins?" + "import_plugins": "Import entities from fibaro plugins / quickapps" + }, + "data_description": { + "url": "The URL of the Fibaro hub in the format `http(s)://IP`.", + "username": "The username of the Fibaro hub user.", + "password": "The password of the Fibaro hub user.", + "import_plugins": "Select if entities from Fibaro plugins / quickapps should be imported." 
} }, "reauth_confirm": { "data": { "password": "[%key:common::config_flow::data::password%]" }, + "data_description": { + "password": "[%key:component::fibaro::config::step::user::data_description::password%]" + }, "title": "[%key:common::config_flow::title::reauth%]", "description": "Please update your password for {username}" } From 314076b85f6c848c9c254cfa9edb731b5ba15930 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Sun, 15 Dec 2024 11:48:11 +0100 Subject: [PATCH 254/677] Replace aiogithub dependency with pynecil update check (#133213) --- .strict-typing | 1 + homeassistant/components/iron_os/__init__.py | 5 ++-- .../components/iron_os/coordinator.py | 25 +++++++------------ .../components/iron_os/manifest.json | 4 +-- .../components/iron_os/quality_scale.yaml | 2 +- mypy.ini | 10 ++++++++ requirements_all.txt | 1 - requirements_test_all.txt | 1 - tests/components/iron_os/conftest.py | 21 +++++++--------- tests/components/iron_os/test_update.py | 8 +++--- 10 files changed, 38 insertions(+), 40 deletions(-) diff --git a/.strict-typing b/.strict-typing index 66dae130fb5..899b22af35f 100644 --- a/.strict-typing +++ b/.strict-typing @@ -271,6 +271,7 @@ homeassistant.components.ios.* homeassistant.components.iotty.* homeassistant.components.ipp.* homeassistant.components.iqvia.* +homeassistant.components.iron_os.* homeassistant.components.islamic_prayer_times.* homeassistant.components.isy994.* homeassistant.components.jellyfin.* diff --git a/homeassistant/components/iron_os/__init__.py b/homeassistant/components/iron_os/__init__.py index 225bf0ff582..0fe5acc2db6 100644 --- a/homeassistant/components/iron_os/__init__.py +++ b/homeassistant/components/iron_os/__init__.py @@ -5,8 +5,7 @@ from __future__ import annotations import logging from typing import TYPE_CHECKING -from aiogithubapi import GitHubAPI -from pynecil import Pynecil +from pynecil import IronOSUpdate, Pynecil from homeassistant.components import bluetooth from 
homeassistant.config_entries import ConfigEntry @@ -48,7 +47,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up IronOS firmware update coordinator.""" session = async_get_clientsession(hass) - github = GitHubAPI(session=session) + github = IronOSUpdate(session) hass.data[IRON_OS_KEY] = IronOSFirmwareUpdateCoordinator(hass, github) await hass.data[IRON_OS_KEY].async_request_refresh() diff --git a/homeassistant/components/iron_os/coordinator.py b/homeassistant/components/iron_os/coordinator.py index 82c7c3b99cd..e8ddef43bd7 100644 --- a/homeassistant/components/iron_os/coordinator.py +++ b/homeassistant/components/iron_os/coordinator.py @@ -5,15 +5,16 @@ from __future__ import annotations from dataclasses import dataclass from datetime import timedelta import logging -from typing import TYPE_CHECKING -from aiogithubapi import GitHubAPI, GitHubException, GitHubReleaseModel from pynecil import ( CommunicationError, DeviceInfoResponse, + IronOSUpdate, + LatestRelease, LiveDataResponse, Pynecil, SettingsDataResponse, + UpdateException, ) from homeassistant.config_entries import ConfigEntry @@ -104,10 +105,10 @@ class IronOSLiveDataCoordinator(IronOSBaseCoordinator[LiveDataResponse]): return False -class IronOSFirmwareUpdateCoordinator(DataUpdateCoordinator[GitHubReleaseModel]): +class IronOSFirmwareUpdateCoordinator(DataUpdateCoordinator[LatestRelease]): """IronOS coordinator for retrieving update information from github.""" - def __init__(self, hass: HomeAssistant, github: GitHubAPI) -> None: + def __init__(self, hass: HomeAssistant, github: IronOSUpdate) -> None: """Initialize IronOS coordinator.""" super().__init__( hass, @@ -118,21 +119,13 @@ class IronOSFirmwareUpdateCoordinator(DataUpdateCoordinator[GitHubReleaseModel]) ) self.github = github - async def _async_update_data(self) -> GitHubReleaseModel: + async def _async_update_data(self) -> LatestRelease: """Fetch data from Github.""" try: - release = await 
self.github.repos.releases.latest("Ralim/IronOS") - - except GitHubException as e: - raise UpdateFailed( - "Failed to retrieve latest release data from Github" - ) from e - - if TYPE_CHECKING: - assert release.data - - return release.data + return await self.github.latest_release() + except UpdateException as e: + raise UpdateFailed("Failed to check for latest IronOS update") from e class IronOSSettingsCoordinator(IronOSBaseCoordinator[SettingsDataResponse]): diff --git a/homeassistant/components/iron_os/manifest.json b/homeassistant/components/iron_os/manifest.json index 982fae16cc4..8556d1e3609 100644 --- a/homeassistant/components/iron_os/manifest.json +++ b/homeassistant/components/iron_os/manifest.json @@ -12,6 +12,6 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/iron_os", "iot_class": "local_polling", - "loggers": ["pynecil", "aiogithubapi"], - "requirements": ["pynecil==2.1.0", "aiogithubapi==24.6.0"] + "loggers": ["pynecil"], + "requirements": ["pynecil==2.1.0"] } diff --git a/homeassistant/components/iron_os/quality_scale.yaml b/homeassistant/components/iron_os/quality_scale.yaml index b793af1815f..a379e7965b3 100644 --- a/homeassistant/components/iron_os/quality_scale.yaml +++ b/homeassistant/components/iron_os/quality_scale.yaml @@ -81,4 +81,4 @@ rules: inject-websession: status: exempt comment: Device doesn't make http requests. 
- strict-typing: todo + strict-typing: done diff --git a/mypy.ini b/mypy.ini index 6daf54a8eb7..e76bc97585c 100644 --- a/mypy.ini +++ b/mypy.ini @@ -2465,6 +2465,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.iron_os.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.islamic_prayer_times.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index 237b57a1438..9cdc7021f53 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -252,7 +252,6 @@ aioflo==2021.11.0 aioftp==0.21.3 # homeassistant.components.github -# homeassistant.components.iron_os aiogithubapi==24.6.0 # homeassistant.components.guardian diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 613f9793cf3..70b6674edc8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -237,7 +237,6 @@ aioesphomeapi==28.0.0 aioflo==2021.11.0 # homeassistant.components.github -# homeassistant.components.iron_os aiogithubapi==24.6.0 # homeassistant.components.guardian diff --git a/tests/components/iron_os/conftest.py b/tests/components/iron_os/conftest.py index eda9c2c5d1d..9091694e6a5 100644 --- a/tests/components/iron_os/conftest.py +++ b/tests/components/iron_os/conftest.py @@ -7,6 +7,7 @@ from bleak.backends.device import BLEDevice from habluetooth import BluetoothServiceInfoBleak from pynecil import ( DeviceInfoResponse, + LatestRelease, LiveDataResponse, OperatingMode, PowerSource, @@ -114,24 +115,20 @@ def mock_ble_device() -> Generator[MagicMock]: @pytest.fixture(autouse=True) -def mock_githubapi() -> Generator[AsyncMock]: - """Mock aiogithubapi.""" +def mock_ironosupdate() -> Generator[AsyncMock]: + """Mock 
IronOSUpdate.""" with patch( - "homeassistant.components.iron_os.GitHubAPI", + "homeassistant.components.iron_os.IronOSUpdate", autospec=True, ) as mock_client: client = mock_client.return_value - client.repos.releases.latest = AsyncMock() - - client.repos.releases.latest.return_value.data.html_url = ( - "https://github.com/Ralim/IronOS/releases/tag/v2.22" + client.latest_release.return_value = LatestRelease( + html_url="https://github.com/Ralim/IronOS/releases/tag/v2.22", + name="V2.22 | TS101 & S60 Added | PinecilV2 improved", + tag_name="v2.22", + body="**RELEASE_NOTES**", ) - client.repos.releases.latest.return_value.data.name = ( - "V2.22 | TS101 & S60 Added | PinecilV2 improved" - ) - client.repos.releases.latest.return_value.data.tag_name = "v2.22" - client.repos.releases.latest.return_value.data.body = "**RELEASE_NOTES**" yield client diff --git a/tests/components/iron_os/test_update.py b/tests/components/iron_os/test_update.py index 7a2650ba7a3..47f3197da0e 100644 --- a/tests/components/iron_os/test_update.py +++ b/tests/components/iron_os/test_update.py @@ -3,7 +3,7 @@ from collections.abc import AsyncGenerator from unittest.mock import AsyncMock, patch -from aiogithubapi import GitHubException +from pynecil import UpdateException import pytest from syrupy.assertion import SnapshotAssertion @@ -26,7 +26,7 @@ async def update_only() -> AsyncGenerator[None]: yield -@pytest.mark.usefixtures("mock_pynecil", "ble_device", "mock_githubapi") +@pytest.mark.usefixtures("mock_pynecil", "ble_device", "mock_ironosupdate") async def test_update( hass: HomeAssistant, config_entry: MockConfigEntry, @@ -60,11 +60,11 @@ async def test_update( async def test_update_unavailable( hass: HomeAssistant, config_entry: MockConfigEntry, - mock_githubapi: AsyncMock, + mock_ironosupdate: AsyncMock, ) -> None: """Test update entity unavailable on error.""" - mock_githubapi.repos.releases.latest.side_effect = GitHubException + mock_ironosupdate.latest_release.side_effect = 
UpdateException config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) From 14a61d94e2fb3bcf8e5661ec6bfa9a0b94a3a905 Mon Sep 17 00:00:00 2001 From: rappenze Date: Sun, 15 Dec 2024 11:49:23 +0100 Subject: [PATCH 255/677] Use entry.runtime_data in fibaro (#133235) --- homeassistant/components/fibaro/__init__.py | 16 ++++++++-------- homeassistant/components/fibaro/binary_sensor.py | 8 +++----- homeassistant/components/fibaro/climate.py | 8 +++----- homeassistant/components/fibaro/cover.py | 8 +++----- homeassistant/components/fibaro/event.py | 8 +++----- homeassistant/components/fibaro/light.py | 8 +++----- homeassistant/components/fibaro/lock.py | 8 +++----- homeassistant/components/fibaro/scene.py | 7 +++---- homeassistant/components/fibaro/sensor.py | 8 +++----- homeassistant/components/fibaro/switch.py | 8 +++----- 10 files changed, 35 insertions(+), 52 deletions(-) diff --git a/homeassistant/components/fibaro/__init__.py b/homeassistant/components/fibaro/__init__.py index 18b9f46eb20..8ede0169482 100644 --- a/homeassistant/components/fibaro/__init__.py +++ b/homeassistant/components/fibaro/__init__.py @@ -28,8 +28,9 @@ from homeassistant.util import slugify from .const import CONF_IMPORT_PLUGINS, DOMAIN -_LOGGER = logging.getLogger(__name__) +type FibaroConfigEntry = ConfigEntry[FibaroController] +_LOGGER = logging.getLogger(__name__) PLATFORMS = [ Platform.BINARY_SENSOR, @@ -381,7 +382,7 @@ def init_controller(data: Mapping[str, Any]) -> FibaroController: return controller -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: FibaroConfigEntry) -> bool: """Set up the Fibaro Component. The unique id of the config entry is the serial number of the home center. 
@@ -395,7 +396,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except FibaroAuthFailed as auth_ex: raise ConfigEntryAuthFailed from auth_ex - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = controller + entry.runtime_data = controller # register the hub device info separately as the hub has sometimes no entities device_registry = dr.async_get(hass) @@ -417,25 +418,24 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: FibaroConfigEntry) -> bool: """Unload a config entry.""" _LOGGER.debug("Shutting down Fibaro connection") unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - hass.data[DOMAIN][entry.entry_id].disable_state_handler() - hass.data[DOMAIN].pop(entry.entry_id) + entry.runtime_data.disable_state_handler() return unload_ok async def async_remove_config_entry_device( - hass: HomeAssistant, config_entry: ConfigEntry, device_entry: DeviceEntry + hass: HomeAssistant, config_entry: FibaroConfigEntry, device_entry: DeviceEntry ) -> bool: """Remove a device entry from fibaro integration. Only removing devices which are not present anymore are eligible to be removed. 
""" - controller: FibaroController = hass.data[DOMAIN][config_entry.entry_id] + controller = config_entry.runtime_data for identifiers in controller.get_all_device_identifiers(): if device_entry.identifiers == identifiers: # Fibaro device is still served by the controller, diff --git a/homeassistant/components/fibaro/binary_sensor.py b/homeassistant/components/fibaro/binary_sensor.py index 9f3efbfb514..16e79c0c1d0 100644 --- a/homeassistant/components/fibaro/binary_sensor.py +++ b/homeassistant/components/fibaro/binary_sensor.py @@ -12,13 +12,11 @@ from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FibaroController -from .const import DOMAIN +from . import FibaroConfigEntry from .entity import FibaroEntity SENSOR_TYPES = { @@ -43,11 +41,11 @@ SENSOR_TYPES = { async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Perform the setup for Fibaro controller devices.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [ FibaroBinarySensor(device) diff --git a/homeassistant/components/fibaro/climate.py b/homeassistant/components/fibaro/climate.py index d5605e71c73..45f700026a0 100644 --- a/homeassistant/components/fibaro/climate.py +++ b/homeassistant/components/fibaro/climate.py @@ -17,13 +17,11 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, Platform, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import FibaroController -from .const import DOMAIN +from . import FibaroConfigEntry from .entity import FibaroEntity PRESET_RESUME = "resume" @@ -111,11 +109,11 @@ OP_MODE_ACTIONS = ("setMode", "setOperatingMode", "setThermostatMode") async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Perform the setup for Fibaro controller devices.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [ FibaroThermostat(device) diff --git a/homeassistant/components/fibaro/cover.py b/homeassistant/components/fibaro/cover.py index 0898d1c9318..bfebbf87bd2 100644 --- a/homeassistant/components/fibaro/cover.py +++ b/homeassistant/components/fibaro/cover.py @@ -13,23 +13,21 @@ from homeassistant.components.cover import ( CoverEntity, CoverEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FibaroController -from .const import DOMAIN +from . 
import FibaroConfigEntry from .entity import FibaroEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fibaro covers.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [FibaroCover(device) for device in controller.fibaro_devices[Platform.COVER]], True, diff --git a/homeassistant/components/fibaro/event.py b/homeassistant/components/fibaro/event.py index c964ab283c1..a2d5da7f877 100644 --- a/homeassistant/components/fibaro/event.py +++ b/homeassistant/components/fibaro/event.py @@ -10,23 +10,21 @@ from homeassistant.components.event import ( EventDeviceClass, EventEntity, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FibaroController -from .const import DOMAIN +from . 
import FibaroConfigEntry from .entity import FibaroEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fibaro event entities.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data # Each scene event represents a button on a device async_add_entities( diff --git a/homeassistant/components/fibaro/light.py b/homeassistant/components/fibaro/light.py index 18f86b6df7d..d40e26244f3 100644 --- a/homeassistant/components/fibaro/light.py +++ b/homeassistant/components/fibaro/light.py @@ -17,13 +17,11 @@ from homeassistant.components.light import ( brightness_supported, color_supported, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FibaroController -from .const import DOMAIN +from . 
import FibaroConfigEntry from .entity import FibaroEntity PARALLEL_UPDATES = 2 @@ -52,11 +50,11 @@ def scaleto99(value: int | None) -> int: async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Perform the setup for Fibaro controller devices.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [FibaroLight(device) for device in controller.fibaro_devices[Platform.LIGHT]], True, diff --git a/homeassistant/components/fibaro/lock.py b/homeassistant/components/fibaro/lock.py index 55583d2a967..62a9dfa43b1 100644 --- a/homeassistant/components/fibaro/lock.py +++ b/homeassistant/components/fibaro/lock.py @@ -7,23 +7,21 @@ from typing import Any from pyfibaro.fibaro_device import DeviceModel from homeassistant.components.lock import ENTITY_ID_FORMAT, LockEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FibaroController -from .const import DOMAIN +from . 
import FibaroConfigEntry from .entity import FibaroEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fibaro locks.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [FibaroLock(device) for device in controller.fibaro_devices[Platform.LOCK]], True, diff --git a/homeassistant/components/fibaro/scene.py b/homeassistant/components/fibaro/scene.py index a40a1ef5b57..a4c0f1bd7f1 100644 --- a/homeassistant/components/fibaro/scene.py +++ b/homeassistant/components/fibaro/scene.py @@ -7,23 +7,22 @@ from typing import Any from pyfibaro.fibaro_scene import SceneModel from homeassistant.components.scene import Scene -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import slugify -from . import FibaroController +from . 
import FibaroConfigEntry, FibaroController from .const import DOMAIN async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Perform the setup for Fibaro scenes.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [FibaroScene(scene, controller) for scene in controller.read_scenes()], True, diff --git a/homeassistant/components/fibaro/sensor.py b/homeassistant/components/fibaro/sensor.py index da94cde9ead..245a0d087d8 100644 --- a/homeassistant/components/fibaro/sensor.py +++ b/homeassistant/components/fibaro/sensor.py @@ -13,7 +13,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, LIGHT_LUX, @@ -27,8 +26,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import convert -from . import FibaroController -from .const import DOMAIN +from . 
import FibaroConfigEntry from .entity import FibaroEntity # List of known sensors which represents a fibaro device @@ -103,12 +101,12 @@ FIBARO_TO_HASS_UNIT: dict[str, str] = { async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fibaro controller devices.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data entities: list[SensorEntity] = [ FibaroSensor(device, MAIN_SENSOR_TYPES.get(device.type)) for device in controller.fibaro_devices[Platform.SENSOR] diff --git a/homeassistant/components/fibaro/switch.py b/homeassistant/components/fibaro/switch.py index 1ad933f5d20..f67683dff6a 100644 --- a/homeassistant/components/fibaro/switch.py +++ b/homeassistant/components/fibaro/switch.py @@ -7,23 +7,21 @@ from typing import Any from pyfibaro.fibaro_device import DeviceModel from homeassistant.components.switch import ENTITY_ID_FORMAT, SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import FibaroController -from .const import DOMAIN +from . 
import FibaroConfigEntry from .entity import FibaroEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: FibaroConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fibaro switches.""" - controller: FibaroController = hass.data[DOMAIN][entry.entry_id] + controller = entry.runtime_data async_add_entities( [FibaroSwitch(device) for device in controller.fibaro_devices[Platform.SWITCH]], True, From 73cb3fa88dda485ca38746c3569df3ada3e7821e Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Sun, 15 Dec 2024 11:55:33 +0100 Subject: [PATCH 256/677] Fix lingering mqtt device_trigger unload entry test (#133202) --- tests/components/mqtt/test_device_trigger.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/tests/components/mqtt/test_device_trigger.py b/tests/components/mqtt/test_device_trigger.py index 009a0315029..5cdfb14a5cf 100644 --- a/tests/components/mqtt/test_device_trigger.py +++ b/tests/components/mqtt/test_device_trigger.py @@ -2,6 +2,7 @@ import json from typing import Any +from unittest.mock import patch import pytest from pytest_unordered import unordered @@ -1692,14 +1693,19 @@ async def test_trigger_debug_info( assert debug_info_data["triggers"][0]["discovery_data"]["payload"] == config2 -@pytest.mark.usefixtures("mqtt_mock") +@patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.INITIAL_SUBSCRIBE_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.SUBSCRIBE_COOLDOWN", 0.0) +@patch("homeassistant.components.mqtt.client.UNSUBSCRIBE_COOLDOWN", 0.0) async def test_unload_entry( hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, ) -> None: """Test unloading the MQTT entry.""" + await mqtt_mock_entry() data1 = ( '{ "automation_type":"trigger",' ' "device":{"identifiers":["0AFFD2"]},' @@ -1733,6 +1739,7 @@ async def 
test_unload_entry( ] }, ) + await hass.async_block_till_done() # Fake short press 1 async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") From ebc8ca8419c534795afff15f2d184d3d14176b2e Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sun, 15 Dec 2024 12:10:54 +0100 Subject: [PATCH 257/677] Replace "this" with "a" to fix Install Update action description (#133210) --- homeassistant/components/update/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/update/strings.json b/homeassistant/components/update/strings.json index eb6db257bb2..5194965cf69 100644 --- a/homeassistant/components/update/strings.json +++ b/homeassistant/components/update/strings.json @@ -56,7 +56,7 @@ "services": { "install": { "name": "Install update", - "description": "Installs an update for this device or service.", + "description": "Installs an update for a device or service.", "fields": { "version": { "name": "Version", @@ -64,7 +64,7 @@ }, "backup": { "name": "Backup", - "description": "If supported by the integration, this creates a backup before starting the update ." + "description": "If supported by the integration, this creates a backup before starting the update." 
} } }, From 8953ac13574eea3655409cdc6d8d638d152e2558 Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Sun, 15 Dec 2024 12:16:10 +0100 Subject: [PATCH 258/677] Improve BMW translations (#133236) --- .../components/bmw_connected_drive/button.py | 9 +++-- .../bmw_connected_drive/coordinator.py | 29 ++++++++++++--- .../bmw_connected_drive/device_tracker.py | 4 +- .../components/bmw_connected_drive/lock.py | 14 +++++-- .../components/bmw_connected_drive/notify.py | 10 +++-- .../components/bmw_connected_drive/number.py | 8 +++- .../components/bmw_connected_drive/select.py | 8 +++- .../bmw_connected_drive/strings.json | 27 +++++++++++++- .../components/bmw_connected_drive/switch.py | 16 +++++--- .../bmw_connected_drive/__init__.py | 5 +++ .../bmw_connected_drive/test_button.py | 12 ++++-- .../bmw_connected_drive/test_lock.py | 11 ++++-- .../bmw_connected_drive/test_notify.py | 19 ++++++---- .../bmw_connected_drive/test_number.py | 37 +++++++++++++++---- .../bmw_connected_drive/test_select.py | 37 +++++++++++++++---- .../bmw_connected_drive/test_switch.py | 27 ++++++++++---- 16 files changed, 209 insertions(+), 64 deletions(-) diff --git a/homeassistant/components/bmw_connected_drive/button.py b/homeassistant/components/bmw_connected_drive/button.py index 1b3043a2dcb..a7c31d0ef79 100644 --- a/homeassistant/components/bmw_connected_drive/button.py +++ b/homeassistant/components/bmw_connected_drive/button.py @@ -16,7 +16,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BMWConfigEntry +from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry from .entity import BMWBaseEntity if TYPE_CHECKING: @@ -55,7 +55,6 @@ BUTTON_TYPES: tuple[BMWButtonEntityDescription, ...] 
= ( BMWButtonEntityDescription( key="deactivate_air_conditioning", translation_key="deactivate_air_conditioning", - name="Deactivate air conditioning", remote_function=lambda vehicle: vehicle.remote_services.trigger_remote_air_conditioning_stop(), is_available=lambda vehicle: vehicle.is_remote_climate_stop_enabled, ), @@ -111,6 +110,10 @@ class BMWButton(BMWBaseEntity, ButtonEntity): try: await self.entity_description.remote_function(self.vehicle) except MyBMWAPIError as ex: - raise HomeAssistantError(ex) from ex + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex self.coordinator.async_update_listeners() diff --git a/homeassistant/components/bmw_connected_drive/coordinator.py b/homeassistant/components/bmw_connected_drive/coordinator.py index 3828a827e68..815bf3393e4 100644 --- a/homeassistant/components/bmw_connected_drive/coordinator.py +++ b/homeassistant/components/bmw_connected_drive/coordinator.py @@ -22,7 +22,13 @@ from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from homeassistant.util.ssl import get_default_context -from .const import CONF_GCID, CONF_READ_ONLY, CONF_REFRESH_TOKEN, DOMAIN, SCAN_INTERVALS +from .const import ( + CONF_GCID, + CONF_READ_ONLY, + CONF_REFRESH_TOKEN, + DOMAIN as BMW_DOMAIN, + SCAN_INTERVALS, +) _LOGGER = logging.getLogger(__name__) @@ -57,7 +63,7 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]): hass, _LOGGER, config_entry=config_entry, - name=f"{DOMAIN}-{config_entry.data[CONF_USERNAME]}", + name=f"{BMW_DOMAIN}-{config_entry.data[CONF_USERNAME]}", update_interval=timedelta( seconds=SCAN_INTERVALS[config_entry.data[CONF_REGION]] ), @@ -75,18 +81,29 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]): except MyBMWCaptchaMissingError as err: # If a captcha is required (user/password login flow), 
always trigger the reauth flow raise ConfigEntryAuthFailed( - translation_domain=DOMAIN, + translation_domain=BMW_DOMAIN, translation_key="missing_captcha", ) from err except MyBMWAuthError as err: # Allow one retry interval before raising AuthFailed to avoid flaky API issues if self.last_update_success: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=BMW_DOMAIN, + translation_key="update_failed", + translation_placeholders={"exception": str(err)}, + ) from err # Clear refresh token and trigger reauth if previous update failed as well self._update_config_entry_refresh_token(None) - raise ConfigEntryAuthFailed(err) from err + raise ConfigEntryAuthFailed( + translation_domain=BMW_DOMAIN, + translation_key="invalid_auth", + ) from err except (MyBMWAPIError, RequestError) as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=BMW_DOMAIN, + translation_key="update_failed", + translation_placeholders={"exception": str(err)}, + ) from err if self.account.refresh_token != old_refresh_token: self._update_config_entry_refresh_token(self.account.refresh_token) diff --git a/homeassistant/components/bmw_connected_drive/device_tracker.py b/homeassistant/components/bmw_connected_drive/device_tracker.py index f53cd72d5de..74df8693f7a 100644 --- a/homeassistant/components/bmw_connected_drive/device_tracker.py +++ b/homeassistant/components/bmw_connected_drive/device_tracker.py @@ -49,7 +49,7 @@ class BMWDeviceTracker(BMWBaseEntity, TrackerEntity): _attr_force_update = False _attr_translation_key = "car" - _attr_icon = "mdi:car" + _attr_name = None def __init__( self, @@ -58,9 +58,7 @@ class BMWDeviceTracker(BMWBaseEntity, TrackerEntity): ) -> None: """Initialize the Tracker.""" super().__init__(coordinator, vehicle) - self._attr_unique_id = vehicle.vin - self._attr_name = None @property def extra_state_attributes(self) -> dict[str, Any]: diff --git a/homeassistant/components/bmw_connected_drive/lock.py 
b/homeassistant/components/bmw_connected_drive/lock.py index 4aa0b411895..4bec12e796b 100644 --- a/homeassistant/components/bmw_connected_drive/lock.py +++ b/homeassistant/components/bmw_connected_drive/lock.py @@ -14,7 +14,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BMWConfigEntry +from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity @@ -70,7 +70,11 @@ class BMWLock(BMWBaseEntity, LockEntity): # Set the state to unknown if the command fails self._attr_is_locked = None self.async_write_ha_state() - raise HomeAssistantError(ex) from ex + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex finally: # Always update the listeners to get the latest state self.coordinator.async_update_listeners() @@ -90,7 +94,11 @@ class BMWLock(BMWBaseEntity, LockEntity): # Set the state to unknown if the command fails self._attr_is_locked = None self.async_write_ha_state() - raise HomeAssistantError(ex) from ex + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex finally: # Always update the listeners to get the latest state self.coordinator.async_update_listeners() diff --git a/homeassistant/components/bmw_connected_drive/notify.py b/homeassistant/components/bmw_connected_drive/notify.py index 04b9fa594e4..dfa0939e81f 100644 --- a/homeassistant/components/bmw_connected_drive/notify.py +++ b/homeassistant/components/bmw_connected_drive/notify.py @@ -20,7 +20,7 @@ from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv from 
homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import DOMAIN, BMWConfigEntry +from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry PARALLEL_UPDATES = 1 @@ -92,7 +92,7 @@ class BMWNotificationService(BaseNotificationService): except (vol.Invalid, TypeError, ValueError) as ex: raise ServiceValidationError( - translation_domain=DOMAIN, + translation_domain=BMW_DOMAIN, translation_key="invalid_poi", translation_placeholders={ "poi_exception": str(ex), @@ -106,4 +106,8 @@ class BMWNotificationService(BaseNotificationService): try: await vehicle.remote_services.trigger_send_poi(poi) except MyBMWAPIError as ex: - raise HomeAssistantError(ex) from ex + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex diff --git a/homeassistant/components/bmw_connected_drive/number.py b/homeassistant/components/bmw_connected_drive/number.py index 7181bad76e0..c6a328ecc20 100644 --- a/homeassistant/components/bmw_connected_drive/number.py +++ b/homeassistant/components/bmw_connected_drive/number.py @@ -18,7 +18,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BMWConfigEntry +from . 
import DOMAIN as BMW_DOMAIN, BMWConfigEntry from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity @@ -109,6 +109,10 @@ class BMWNumber(BMWBaseEntity, NumberEntity): try: await self.entity_description.remote_service(self.vehicle, value) except MyBMWAPIError as ex: - raise HomeAssistantError(ex) from ex + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex self.coordinator.async_update_listeners() diff --git a/homeassistant/components/bmw_connected_drive/select.py b/homeassistant/components/bmw_connected_drive/select.py index 7091cbc6817..385b45fd9fa 100644 --- a/homeassistant/components/bmw_connected_drive/select.py +++ b/homeassistant/components/bmw_connected_drive/select.py @@ -15,7 +15,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BMWConfigEntry +from . 
import DOMAIN as BMW_DOMAIN, BMWConfigEntry from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity @@ -123,6 +123,10 @@ class BMWSelect(BMWBaseEntity, SelectEntity): try: await self.entity_description.remote_service(self.vehicle, option) except MyBMWAPIError as ex: - raise HomeAssistantError(ex) from ex + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex self.coordinator.async_update_listeners() diff --git a/homeassistant/components/bmw_connected_drive/strings.json b/homeassistant/components/bmw_connected_drive/strings.json index 93abce5d73f..edb0d5cfb12 100644 --- a/homeassistant/components/bmw_connected_drive/strings.json +++ b/homeassistant/components/bmw_connected_drive/strings.json @@ -2,11 +2,16 @@ "config": { "step": { "user": { - "description": "Enter your MyBMW/MINI Connected credentials.", + "description": "Connect to your MyBMW/MINI Connected account to retrieve vehicle data.", "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", "region": "ConnectedDrive Region" + }, + "data_description": { + "username": "The email address of your MyBMW/MINI Connected account.", + "password": "The password of your MyBMW/MINI Connected account.", + "region": "The region of your MyBMW/MINI Connected account." 
} }, "captcha": { @@ -23,6 +28,9 @@ "description": "Update your MyBMW/MINI Connected password for account `{username}` in region `{region}`.", "data": { "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "[%key:component::bmw_connected_drive::config::step::user::data_description::password%]" } } }, @@ -41,7 +49,10 @@ "step": { "account_options": { "data": { - "read_only": "Read-only (only sensors and notify, no execution of services, no lock)" + "read_only": "Read-only mode" + }, + "data_description": { + "read_only": "Only retrieve values and send POI data, but don't offer any services that can change the vehicle state." } } } @@ -83,6 +94,9 @@ "activate_air_conditioning": { "name": "Activate air conditioning" }, + "deactivate_air_conditioning": { + "name": "Deactivate air conditioning" + }, "find_vehicle": { "name": "Find vehicle" } @@ -220,6 +234,15 @@ }, "missing_captcha": { "message": "Login requires captcha validation" + }, + "invalid_auth": { + "message": "[%key:common::config_flow::error::invalid_auth%]" + }, + "remote_service_error": { + "message": "Error executing remote service on vehicle. {exception}" + }, + "update_failed": { + "message": "Error updating vehicle data. {exception}" } } } diff --git a/homeassistant/components/bmw_connected_drive/switch.py b/homeassistant/components/bmw_connected_drive/switch.py index 826f6b840b2..600ad41165a 100644 --- a/homeassistant/components/bmw_connected_drive/switch.py +++ b/homeassistant/components/bmw_connected_drive/switch.py @@ -14,7 +14,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import BMWConfigEntry +from . 
import DOMAIN as BMW_DOMAIN, BMWConfigEntry from .coordinator import BMWDataUpdateCoordinator from .entity import BMWBaseEntity @@ -111,8 +111,11 @@ class BMWSwitch(BMWBaseEntity, SwitchEntity): try: await self.entity_description.remote_service_on(self.vehicle) except MyBMWAPIError as ex: - raise HomeAssistantError(ex) from ex - + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex self.coordinator.async_update_listeners() async def async_turn_off(self, **kwargs: Any) -> None: @@ -120,6 +123,9 @@ class BMWSwitch(BMWBaseEntity, SwitchEntity): try: await self.entity_description.remote_service_off(self.vehicle) except MyBMWAPIError as ex: - raise HomeAssistantError(ex) from ex - + raise HomeAssistantError( + translation_domain=BMW_DOMAIN, + translation_key="remote_service_error", + translation_placeholders={"exception": str(ex)}, + ) from ex self.coordinator.async_update_listeners() diff --git a/tests/components/bmw_connected_drive/__init__.py b/tests/components/bmw_connected_drive/__init__.py index f490b854749..c437e1d3669 100644 --- a/tests/components/bmw_connected_drive/__init__.py +++ b/tests/components/bmw_connected_drive/__init__.py @@ -48,6 +48,11 @@ FIXTURE_CONFIG_ENTRY = { "unique_id": f"{FIXTURE_USER_INPUT[CONF_REGION]}-{FIXTURE_USER_INPUT[CONF_USERNAME]}", } +REMOTE_SERVICE_EXC_REASON = "HTTPStatusError: 502 Bad Gateway" +REMOTE_SERVICE_EXC_TRANSLATION = ( + "Error executing remote service on vehicle. 
HTTPStatusError: 502 Bad Gateway" +) + async def setup_mocked_integration(hass: HomeAssistant) -> MockConfigEntry: """Mock a fully setup config entry and all components based on fixtures.""" diff --git a/tests/components/bmw_connected_drive/test_button.py b/tests/components/bmw_connected_drive/test_button.py index 88c7990cde9..356cfcb439e 100644 --- a/tests/components/bmw_connected_drive/test_button.py +++ b/tests/components/bmw_connected_drive/test_button.py @@ -13,7 +13,11 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from . import check_remote_service_call, setup_mocked_integration +from . import ( + REMOTE_SERVICE_EXC_TRANSLATION, + check_remote_service_call, + setup_mocked_integration, +) from tests.common import snapshot_platform @@ -81,11 +85,13 @@ async def test_service_call_fail( monkeypatch.setattr( RemoteServices, "trigger_remote_service", - AsyncMock(side_effect=MyBMWRemoteServiceError), + AsyncMock( + side_effect=MyBMWRemoteServiceError("HTTPStatusError: 502 Bad Gateway") + ), ) # Test - with pytest.raises(HomeAssistantError): + with pytest.raises(HomeAssistantError, match=REMOTE_SERVICE_EXC_TRANSLATION): await hass.services.async_call( "button", "press", diff --git a/tests/components/bmw_connected_drive/test_lock.py b/tests/components/bmw_connected_drive/test_lock.py index 2fa694d426b..088534c79f5 100644 --- a/tests/components/bmw_connected_drive/test_lock.py +++ b/tests/components/bmw_connected_drive/test_lock.py @@ -16,7 +16,12 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util -from . import check_remote_service_call, setup_mocked_integration +from . 
import ( + REMOTE_SERVICE_EXC_REASON, + REMOTE_SERVICE_EXC_TRANSLATION, + check_remote_service_call, + setup_mocked_integration, +) from tests.common import snapshot_platform from tests.components.recorder.common import async_wait_recording_done @@ -118,11 +123,11 @@ async def test_service_call_fail( monkeypatch.setattr( RemoteServices, "trigger_remote_service", - AsyncMock(side_effect=MyBMWRemoteServiceError), + AsyncMock(side_effect=MyBMWRemoteServiceError(REMOTE_SERVICE_EXC_REASON)), ) # Test - with pytest.raises(HomeAssistantError): + with pytest.raises(HomeAssistantError, match=REMOTE_SERVICE_EXC_TRANSLATION): await hass.services.async_call( "lock", service, diff --git a/tests/components/bmw_connected_drive/test_notify.py b/tests/components/bmw_connected_drive/test_notify.py index 4113f618be0..1bade3be011 100644 --- a/tests/components/bmw_connected_drive/test_notify.py +++ b/tests/components/bmw_connected_drive/test_notify.py @@ -11,7 +11,11 @@ import respx from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from . import check_remote_service_call, setup_mocked_integration +from . 
import ( + REMOTE_SERVICE_EXC_TRANSLATION, + check_remote_service_call, + setup_mocked_integration, +) async def test_legacy_notify_service_simple( @@ -68,21 +72,21 @@ async def test_legacy_notify_service_simple( { "latitude": POI_DATA.get("lat"), }, - "Invalid data for point of interest: required key not provided @ data['longitude']", + r"Invalid data for point of interest: required key not provided @ data\['longitude'\]", ), ( { "latitude": POI_DATA.get("lat"), "longitude": "text", }, - "Invalid data for point of interest: invalid longitude for dictionary value @ data['longitude']", + r"Invalid data for point of interest: invalid longitude for dictionary value @ data\['longitude'\]", ), ( { "latitude": POI_DATA.get("lat"), "longitude": 9999, }, - "Invalid data for point of interest: invalid longitude for dictionary value @ data['longitude']", + r"Invalid data for point of interest: invalid longitude for dictionary value @ data\['longitude'\]", ), ], ) @@ -96,7 +100,7 @@ async def test_service_call_invalid_input( # Setup component assert await setup_mocked_integration(hass) - with pytest.raises(ServiceValidationError) as exc: + with pytest.raises(ServiceValidationError, match=exc_translation): await hass.services.async_call( "notify", "bmw_connected_drive_ix_xdrive50", @@ -106,7 +110,6 @@ async def test_service_call_invalid_input( }, blocking=True, ) - assert str(exc.value) == exc_translation @pytest.mark.usefixtures("bmw_fixture") @@ -132,11 +135,11 @@ async def test_service_call_fail( monkeypatch.setattr( RemoteServices, "trigger_remote_service", - AsyncMock(side_effect=raised), + AsyncMock(side_effect=raised("HTTPStatusError: 502 Bad Gateway")), ) # Test - with pytest.raises(expected): + with pytest.raises(expected, match=REMOTE_SERVICE_EXC_TRANSLATION): await hass.services.async_call( "notify", "bmw_connected_drive_ix_xdrive50", diff --git a/tests/components/bmw_connected_drive/test_number.py b/tests/components/bmw_connected_drive/test_number.py index 
f2a50ce4df6..733f4fe3113 100644 --- a/tests/components/bmw_connected_drive/test_number.py +++ b/tests/components/bmw_connected_drive/test_number.py @@ -13,7 +13,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from . import check_remote_service_call, setup_mocked_integration +from . import ( + REMOTE_SERVICE_EXC_REASON, + REMOTE_SERVICE_EXC_TRANSLATION, + check_remote_service_call, + setup_mocked_integration, +) from tests.common import snapshot_platform @@ -89,7 +94,10 @@ async def test_service_call_invalid_input( old_value = hass.states.get(entity_id).state # Test - with pytest.raises(ValueError): + with pytest.raises( + ValueError, + match="Target SoC must be an integer between 20 and 100 that is a multiple of 5.", + ): await hass.services.async_call( "number", "set_value", @@ -102,17 +110,32 @@ async def test_service_call_invalid_input( @pytest.mark.usefixtures("bmw_fixture") @pytest.mark.parametrize( - ("raised", "expected"), + ("raised", "expected", "exc_translation"), [ - (MyBMWRemoteServiceError, HomeAssistantError), - (MyBMWAPIError, HomeAssistantError), - (ValueError, ValueError), + ( + MyBMWRemoteServiceError(REMOTE_SERVICE_EXC_REASON), + HomeAssistantError, + REMOTE_SERVICE_EXC_TRANSLATION, + ), + ( + MyBMWAPIError(REMOTE_SERVICE_EXC_REASON), + HomeAssistantError, + REMOTE_SERVICE_EXC_TRANSLATION, + ), + ( + ValueError( + "Target SoC must be an integer between 20 and 100 that is a multiple of 5." 
+ ), + ValueError, + "Target SoC must be an integer between 20 and 100 that is a multiple of 5.", + ), ], ) async def test_service_call_fail( hass: HomeAssistant, raised: Exception, expected: Exception, + exc_translation: str, monkeypatch: pytest.MonkeyPatch, ) -> None: """Test exception handling.""" @@ -130,7 +153,7 @@ async def test_service_call_fail( ) # Test - with pytest.raises(expected): + with pytest.raises(expected, match=exc_translation): await hass.services.async_call( "number", "set_value", diff --git a/tests/components/bmw_connected_drive/test_select.py b/tests/components/bmw_connected_drive/test_select.py index a270f38ee01..53c39f572f2 100644 --- a/tests/components/bmw_connected_drive/test_select.py +++ b/tests/components/bmw_connected_drive/test_select.py @@ -16,7 +16,12 @@ from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er from homeassistant.helpers.translation import async_get_translations -from . import check_remote_service_call, setup_mocked_integration +from . 
import ( + REMOTE_SERVICE_EXC_REASON, + REMOTE_SERVICE_EXC_TRANSLATION, + check_remote_service_call, + setup_mocked_integration, +) from tests.common import snapshot_platform @@ -105,7 +110,10 @@ async def test_service_call_invalid_input( old_value = hass.states.get(entity_id).state # Test - with pytest.raises(ServiceValidationError): + with pytest.raises( + ServiceValidationError, + match=f"Option {value} is not valid for entity {entity_id}", + ): await hass.services.async_call( "select", "select_option", @@ -118,17 +126,32 @@ async def test_service_call_invalid_input( @pytest.mark.usefixtures("bmw_fixture") @pytest.mark.parametrize( - ("raised", "expected"), + ("raised", "expected", "exc_translation"), [ - (MyBMWRemoteServiceError, HomeAssistantError), - (MyBMWAPIError, HomeAssistantError), - (ServiceValidationError, ServiceValidationError), + ( + MyBMWRemoteServiceError(REMOTE_SERVICE_EXC_REASON), + HomeAssistantError, + REMOTE_SERVICE_EXC_TRANSLATION, + ), + ( + MyBMWAPIError(REMOTE_SERVICE_EXC_REASON), + HomeAssistantError, + REMOTE_SERVICE_EXC_TRANSLATION, + ), + ( + ServiceValidationError( + "Option 17 is not valid for entity select.i4_edrive40_ac_charging_limit" + ), + ServiceValidationError, + "Option 17 is not valid for entity select.i4_edrive40_ac_charging_limit", + ), ], ) async def test_service_call_fail( hass: HomeAssistant, raised: Exception, expected: Exception, + exc_translation: str, monkeypatch: pytest.MonkeyPatch, ) -> None: """Test exception handling.""" @@ -146,7 +169,7 @@ async def test_service_call_fail( ) # Test - with pytest.raises(expected): + with pytest.raises(expected, match=exc_translation): await hass.services.async_call( "select", "select_option", diff --git a/tests/components/bmw_connected_drive/test_switch.py b/tests/components/bmw_connected_drive/test_switch.py index 58bddbfc937..c28b651abaf 100644 --- a/tests/components/bmw_connected_drive/test_switch.py +++ b/tests/components/bmw_connected_drive/test_switch.py @@ -13,7 +13,12 
@@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from . import check_remote_service_call, setup_mocked_integration +from . import ( + REMOTE_SERVICE_EXC_REASON, + REMOTE_SERVICE_EXC_TRANSLATION, + check_remote_service_call, + setup_mocked_integration, +) from tests.common import snapshot_platform @@ -75,17 +80,25 @@ async def test_service_call_success( @pytest.mark.usefixtures("bmw_fixture") @pytest.mark.parametrize( - ("raised", "expected"), + ("raised", "expected", "exc_translation"), [ - (MyBMWRemoteServiceError, HomeAssistantError), - (MyBMWAPIError, HomeAssistantError), - (ValueError, ValueError), + ( + MyBMWRemoteServiceError(REMOTE_SERVICE_EXC_REASON), + HomeAssistantError, + REMOTE_SERVICE_EXC_TRANSLATION, + ), + ( + MyBMWAPIError(REMOTE_SERVICE_EXC_REASON), + HomeAssistantError, + REMOTE_SERVICE_EXC_TRANSLATION, + ), ], ) async def test_service_call_fail( hass: HomeAssistant, raised: Exception, expected: Exception, + exc_translation: str, monkeypatch: pytest.MonkeyPatch, ) -> None: """Test exception handling.""" @@ -107,7 +120,7 @@ async def test_service_call_fail( assert hass.states.get(entity_id).state == old_value # Test - with pytest.raises(expected): + with pytest.raises(expected, match=exc_translation): await hass.services.async_call( "switch", "turn_on", @@ -122,7 +135,7 @@ async def test_service_call_fail( assert hass.states.get(entity_id).state == old_value # Test - with pytest.raises(expected): + with pytest.raises(expected, match=exc_translation): await hass.services.async_call( "switch", "turn_off", From d1e466e6150f9890547ab9afa3708163105a165f Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Sun, 15 Dec 2024 12:19:25 +0100 Subject: [PATCH 259/677] Update elevenlabs to 1.9.0 (#133264) --- homeassistant/components/elevenlabs/__init__.py | 3 +-- 
homeassistant/components/elevenlabs/config_flow.py | 2 +- homeassistant/components/elevenlabs/manifest.json | 2 +- homeassistant/components/elevenlabs/tts.py | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/elevenlabs/conftest.py | 2 +- 7 files changed, 7 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/elevenlabs/__init__.py b/homeassistant/components/elevenlabs/__init__.py index 7da4802e98a..db7a7f64c97 100644 --- a/homeassistant/components/elevenlabs/__init__.py +++ b/homeassistant/components/elevenlabs/__init__.py @@ -4,8 +4,7 @@ from __future__ import annotations from dataclasses import dataclass -from elevenlabs import Model -from elevenlabs.client import AsyncElevenLabs +from elevenlabs import AsyncElevenLabs, Model from elevenlabs.core import ApiError from homeassistant.config_entries import ConfigEntry diff --git a/homeassistant/components/elevenlabs/config_flow.py b/homeassistant/components/elevenlabs/config_flow.py index 227150a0f4e..55cdd3ea944 100644 --- a/homeassistant/components/elevenlabs/config_flow.py +++ b/homeassistant/components/elevenlabs/config_flow.py @@ -5,7 +5,7 @@ from __future__ import annotations import logging from typing import Any -from elevenlabs.client import AsyncElevenLabs +from elevenlabs import AsyncElevenLabs from elevenlabs.core import ApiError import voluptuous as vol diff --git a/homeassistant/components/elevenlabs/manifest.json b/homeassistant/components/elevenlabs/manifest.json index 968ea7b688a..eb6df09149a 100644 --- a/homeassistant/components/elevenlabs/manifest.json +++ b/homeassistant/components/elevenlabs/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["elevenlabs"], - "requirements": ["elevenlabs==1.6.1"] + "requirements": ["elevenlabs==1.9.0"] } diff --git a/homeassistant/components/elevenlabs/tts.py b/homeassistant/components/elevenlabs/tts.py index efc2154882a..8b016b6af8b 100644 --- 
a/homeassistant/components/elevenlabs/tts.py +++ b/homeassistant/components/elevenlabs/tts.py @@ -6,7 +6,7 @@ import logging from types import MappingProxyType from typing import Any -from elevenlabs.client import AsyncElevenLabs +from elevenlabs import AsyncElevenLabs from elevenlabs.core import ApiError from elevenlabs.types import Model, Voice as ElevenLabsVoice, VoiceSettings diff --git a/requirements_all.txt b/requirements_all.txt index 9cdc7021f53..011fedd5a5f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -815,7 +815,7 @@ eheimdigital==1.0.3 electrickiwi-api==0.8.5 # homeassistant.components.elevenlabs -elevenlabs==1.6.1 +elevenlabs==1.9.0 # homeassistant.components.elgato elgato==5.1.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 70b6674edc8..0f94266313c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -693,7 +693,7 @@ eheimdigital==1.0.3 electrickiwi-api==0.8.5 # homeassistant.components.elevenlabs -elevenlabs==1.6.1 +elevenlabs==1.9.0 # homeassistant.components.elgato elgato==5.1.2 diff --git a/tests/components/elevenlabs/conftest.py b/tests/components/elevenlabs/conftest.py index c4d9a87b5ad..c9ed49ba13c 100644 --- a/tests/components/elevenlabs/conftest.py +++ b/tests/components/elevenlabs/conftest.py @@ -31,7 +31,7 @@ def mock_async_client() -> Generator[AsyncMock]: client_mock.voices.get_all.return_value = GetVoicesResponse(voices=MOCK_VOICES) client_mock.models.get_all.return_value = MOCK_MODELS with patch( - "elevenlabs.client.AsyncElevenLabs", return_value=client_mock + "elevenlabs.AsyncElevenLabs", return_value=client_mock ) as mock_async_client: yield mock_async_client From 85ef2c0fb17f85e69e8272853114c97b0af7d6e8 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sun, 15 Dec 2024 03:19:57 -0800 Subject: [PATCH 260/677] Mark Google Tasks action-exceptions quality scale as done (#133253) --- homeassistant/components/google_tasks/quality_scale.yaml | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/homeassistant/components/google_tasks/quality_scale.yaml b/homeassistant/components/google_tasks/quality_scale.yaml index b4159b30145..94c81d0b7f8 100644 --- a/homeassistant/components/google_tasks/quality_scale.yaml +++ b/homeassistant/components/google_tasks/quality_scale.yaml @@ -39,7 +39,7 @@ rules: reauthentication-flow: status: todo comment: Missing a test that reauthenticates with the wrong account - action-exceptions: todo + action-exceptions: done docs-installation-parameters: todo integration-owner: done parallel-updates: todo From 760c3ac98ce8bdcab3ffee3d8ba49c971081c4b4 Mon Sep 17 00:00:00 2001 From: Claudio Ruggeri - CR-Tech <41435902+crug80@users.noreply.github.com> Date: Sun, 15 Dec 2024 12:24:27 +0100 Subject: [PATCH 261/677] Bump pymodbus version 3.7.4 (#133175) Co-authored-by: Joost Lekkerkerker --- .../components/modbus/binary_sensor.py | 2 +- homeassistant/components/modbus/manifest.json | 2 +- homeassistant/components/modbus/modbus.py | 19 +++++++++---------- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/modbus/test_init.py | 4 +--- 6 files changed, 14 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/modbus/binary_sensor.py b/homeassistant/components/modbus/binary_sensor.py index b50d21faf42..97ade53762b 100644 --- a/homeassistant/components/modbus/binary_sensor.py +++ b/homeassistant/components/modbus/binary_sensor.py @@ -121,7 +121,7 @@ class ModbusBinarySensor(BasePlatform, RestoreEntity, BinarySensorEntity): else: self._attr_available = True if self._input_type in (CALL_TYPE_COIL, CALL_TYPE_DISCRETE): - self._result = result.bits + self._result = [int(bit) for bit in result.bits] else: self._result = result.registers self._attr_is_on = bool(self._result[0] & 1) diff --git a/homeassistant/components/modbus/manifest.json b/homeassistant/components/modbus/manifest.json index 7cba4692eb6..fc25a329c11 100644 --- 
a/homeassistant/components/modbus/manifest.json +++ b/homeassistant/components/modbus/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/modbus", "iot_class": "local_polling", "loggers": ["pymodbus"], - "requirements": ["pymodbus==3.6.9"] + "requirements": ["pymodbus==3.7.4"] } diff --git a/homeassistant/components/modbus/modbus.py b/homeassistant/components/modbus/modbus.py index 18d91f8dd3b..efce44d7979 100644 --- a/homeassistant/components/modbus/modbus.py +++ b/homeassistant/components/modbus/modbus.py @@ -14,8 +14,8 @@ from pymodbus.client import ( AsyncModbusUdpClient, ) from pymodbus.exceptions import ModbusException -from pymodbus.pdu import ModbusResponse -from pymodbus.transaction import ModbusAsciiFramer, ModbusRtuFramer, ModbusSocketFramer +from pymodbus.framer import FramerType +from pymodbus.pdu import ModbusPDU import voluptuous as vol from homeassistant.const import ( @@ -265,14 +265,13 @@ class ModbusHub: "port": client_config[CONF_PORT], "timeout": client_config[CONF_TIMEOUT], "retries": 3, - "retry_on_empty": True, } if self._config_type == SERIAL: # serial configuration if client_config[CONF_METHOD] == "ascii": - self._pb_params["framer"] = ModbusAsciiFramer + self._pb_params["framer"] = FramerType.ASCII else: - self._pb_params["framer"] = ModbusRtuFramer + self._pb_params["framer"] = FramerType.RTU self._pb_params.update( { "baudrate": client_config[CONF_BAUDRATE], @@ -285,9 +284,9 @@ class ModbusHub: # network configuration self._pb_params["host"] = client_config[CONF_HOST] if self._config_type == RTUOVERTCP: - self._pb_params["framer"] = ModbusRtuFramer + self._pb_params["framer"] = FramerType.RTU else: - self._pb_params["framer"] = ModbusSocketFramer + self._pb_params["framer"] = FramerType.SOCKET if CONF_MSG_WAIT in client_config: self._msg_wait = client_config[CONF_MSG_WAIT] / 1000 @@ -370,12 +369,12 @@ class ModbusHub: async def low_level_pb_call( self, slave: int | None, address: int, value: int | 
list[int], use_call: str - ) -> ModbusResponse | None: + ) -> ModbusPDU | None: """Call sync. pymodbus.""" kwargs = {"slave": slave} if slave else {} entry = self._pb_request[use_call] try: - result: ModbusResponse = await entry.func(address, value, **kwargs) + result: ModbusPDU = await entry.func(address, value, **kwargs) except ModbusException as exception_error: error = f"Error: device: {slave} address: {address} -> {exception_error!s}" self._log_error(error) @@ -403,7 +402,7 @@ class ModbusHub: address: int, value: int | list[int], use_call: str, - ) -> ModbusResponse | None: + ) -> ModbusPDU | None: """Convert async to sync pymodbus call.""" if self._config_delay: return None diff --git a/requirements_all.txt b/requirements_all.txt index 011fedd5a5f..e4b9787c641 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2091,7 +2091,7 @@ pymitv==1.4.3 pymochad==0.2.0 # homeassistant.components.modbus -pymodbus==3.6.9 +pymodbus==3.7.4 # homeassistant.components.monoprice pymonoprice==0.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0f94266313c..58f6d599825 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1696,7 +1696,7 @@ pymicro-vad==1.0.1 pymochad==0.2.0 # homeassistant.components.modbus -pymodbus==3.6.9 +pymodbus==3.7.4 # homeassistant.components.monoprice pymonoprice==0.4 diff --git a/tests/components/modbus/test_init.py b/tests/components/modbus/test_init.py index 3b8a76f5606..0cfa7ba8b24 100644 --- a/tests/components/modbus/test_init.py +++ b/tests/components/modbus/test_init.py @@ -19,7 +19,7 @@ from unittest import mock from freezegun.api import FrozenDateTimeFactory from pymodbus.exceptions import ModbusException -from pymodbus.pdu import ExceptionResponse, IllegalFunctionRequest +from pymodbus.pdu import ExceptionResponse import pytest import voluptuous as vol @@ -820,7 +820,6 @@ SERVICE = "service" [ {VALUE: ReadResult([0x0001]), DATA: ""}, {VALUE: ExceptionResponse(0x06), DATA: 
"Pymodbus:"}, - {VALUE: IllegalFunctionRequest(0x06), DATA: "Pymodbus:"}, {VALUE: ModbusException("fail write_"), DATA: "Pymodbus:"}, ], ) @@ -928,7 +927,6 @@ async def mock_modbus_read_pymodbus_fixture( ("do_return", "do_exception", "do_expect_state", "do_expect_value"), [ (ReadResult([1]), None, STATE_ON, "1"), - (IllegalFunctionRequest(0x99), None, STATE_UNAVAILABLE, STATE_UNAVAILABLE), (ExceptionResponse(0x99), None, STATE_UNAVAILABLE, STATE_UNAVAILABLE), ( ReadResult([1]), From aa4b64386e462ef5379bee1480f30d3d899d3125 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Sun, 15 Dec 2024 12:25:35 +0100 Subject: [PATCH 262/677] Don't update existing Fronius config entries from config flow (#132886) --- homeassistant/components/fronius/__init__.py | 2 +- .../components/fronius/config_flow.py | 2 +- tests/components/fronius/test_config_flow.py | 34 ++++++++----------- 3 files changed, 17 insertions(+), 21 deletions(-) diff --git a/homeassistant/components/fronius/__init__.py b/homeassistant/components/fronius/__init__.py index 03d80e3b2d9..4ba893df85c 100644 --- a/homeassistant/components/fronius/__init__.py +++ b/homeassistant/components/fronius/__init__.py @@ -60,7 +60,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: FroniusConfigEntry) -> async def async_remove_config_entry_device( - hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry + hass: HomeAssistant, config_entry: FroniusConfigEntry, device_entry: dr.DeviceEntry ) -> bool: """Remove a config entry from a device.""" return True diff --git a/homeassistant/components/fronius/config_flow.py b/homeassistant/components/fronius/config_flow.py index 53433e31233..ccc15d80401 100644 --- a/homeassistant/components/fronius/config_flow.py +++ b/homeassistant/components/fronius/config_flow.py @@ -87,7 +87,7 @@ class FroniusConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "unknown" else: await self.async_set_unique_id(unique_id, raise_on_progress=False) - 
self._abort_if_unique_id_configured(updates=dict(info)) + self._abort_if_unique_id_configured() return self.async_create_entry(title=create_title(info), data=info) diff --git a/tests/components/fronius/test_config_flow.py b/tests/components/fronius/test_config_flow.py index 5d0b93e7cd5..ed90e266b81 100644 --- a/tests/components/fronius/test_config_flow.py +++ b/tests/components/fronius/test_config_flow.py @@ -205,10 +205,10 @@ async def test_form_already_existing(hass: HomeAssistant) -> None: assert result2["reason"] == "already_configured" -async def test_form_updates_host( +async def test_config_flow_already_configured( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: - """Test existing entry gets updated.""" + """Test existing entry doesn't get updated by config flow.""" old_host = "http://10.1.0.1" new_host = "http://10.1.0.2" entry = MockConfigEntry( @@ -231,26 +231,20 @@ async def test_form_updates_host( ) mock_responses(aioclient_mock, host=new_host) - with patch( - "homeassistant.components.fronius.async_unload_entry", - return_value=True, - ) as mock_unload_entry: - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": new_host, - }, - ) - await hass.async_block_till_done() - + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "host": new_host, + }, + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_configured" - mock_unload_entry.assert_called_with(hass, entry) entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 assert entries[0].data == { - "host": new_host, + "host": old_host, # not updated from config flow - only from reconfigure flow "is_logger": True, } @@ -326,11 +320,13 @@ async def test_dhcp_invalid( async def test_reconfigure(hass: HomeAssistant) -> None: """Test reconfiguring an entry.""" + old_host = "http://10.1.0.1" + new_host = "http://10.1.0.2" entry = 
MockConfigEntry( domain=DOMAIN, unique_id="1234567", data={ - CONF_HOST: "10.1.2.3", + CONF_HOST: old_host, "is_logger": True, }, ) @@ -357,7 +353,7 @@ async def test_reconfigure(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ - "host": "10.9.1.1", + "host": new_host, }, ) await hass.async_block_till_done() @@ -365,7 +361,7 @@ async def test_reconfigure(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reconfigure_successful" assert entry.data == { - "host": "10.9.1.1", + "host": new_host, "is_logger": False, } assert len(mock_setup_entry.mock_calls) == 1 From 74e4654c26177909e653921f27f838fd1366adc0 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Sun, 15 Dec 2024 12:28:32 +0100 Subject: [PATCH 263/677] Revert "Improve recorder history queries (#131702)" (#133203) --- homeassistant/components/history/__init__.py | 7 ++-- homeassistant/components/history/helpers.py | 13 ++++---- .../components/history/websocket_api.py | 7 ++-- homeassistant/components/recorder/core.py | 1 - .../components/recorder/history/legacy.py | 18 ++++++----- .../components/recorder/history/modern.py | 31 +++++++++--------- homeassistant/components/recorder/purge.py | 3 -- homeassistant/components/recorder/queries.py | 9 ------ .../recorder/table_managers/states.py | 32 ------------------- homeassistant/components/recorder/tasks.py | 2 ++ tests/components/recorder/test_purge.py | 17 ---------- 11 files changed, 38 insertions(+), 102 deletions(-) diff --git a/homeassistant/components/history/__init__.py b/homeassistant/components/history/__init__.py index 7241e1fac9a..365be06fd2d 100644 --- a/homeassistant/components/history/__init__.py +++ b/homeassistant/components/history/__init__.py @@ -22,7 +22,7 @@ import homeassistant.util.dt as dt_util from . 
import websocket_api from .const import DOMAIN -from .helpers import entities_may_have_state_changes_after, has_states_before +from .helpers import entities_may_have_state_changes_after, has_recorder_run_after CONF_ORDER = "use_include_order" @@ -107,10 +107,7 @@ class HistoryPeriodView(HomeAssistantView): no_attributes = "no_attributes" in request.query if ( - # has_states_before will return True if there are states older than - # end_time. If it's false, we know there are no states in the - # database up until end_time. - (end_time and not has_states_before(hass, end_time)) + (end_time and not has_recorder_run_after(hass, end_time)) or not include_start_time_state and entity_ids and not entities_may_have_state_changes_after( diff --git a/homeassistant/components/history/helpers.py b/homeassistant/components/history/helpers.py index 2010b7373ff..bd477e7e4ed 100644 --- a/homeassistant/components/history/helpers.py +++ b/homeassistant/components/history/helpers.py @@ -6,6 +6,7 @@ from collections.abc import Iterable from datetime import datetime as dt from homeassistant.components.recorder import get_instance +from homeassistant.components.recorder.models import process_timestamp from homeassistant.core import HomeAssistant @@ -25,10 +26,8 @@ def entities_may_have_state_changes_after( return False -def has_states_before(hass: HomeAssistant, run_time: dt) -> bool: - """Check if the recorder has states as old or older than run_time. - - Returns True if there may be such states. 
- """ - oldest_ts = get_instance(hass).states_manager.oldest_ts - return oldest_ts is not None and run_time.timestamp() >= oldest_ts +def has_recorder_run_after(hass: HomeAssistant, run_time: dt) -> bool: + """Check if the recorder has any runs after a specific time.""" + return run_time >= process_timestamp( + get_instance(hass).recorder_runs_manager.first.start + ) diff --git a/homeassistant/components/history/websocket_api.py b/homeassistant/components/history/websocket_api.py index 35f8ed5f1ac..c85d975c3c9 100644 --- a/homeassistant/components/history/websocket_api.py +++ b/homeassistant/components/history/websocket_api.py @@ -39,7 +39,7 @@ from homeassistant.util.async_ import create_eager_task import homeassistant.util.dt as dt_util from .const import EVENT_COALESCE_TIME, MAX_PENDING_HISTORY_STATES -from .helpers import entities_may_have_state_changes_after, has_states_before +from .helpers import entities_may_have_state_changes_after, has_recorder_run_after _LOGGER = logging.getLogger(__name__) @@ -142,10 +142,7 @@ async def ws_get_history_during_period( no_attributes = msg["no_attributes"] if ( - # has_states_before will return True if there are states older than - # end_time. If it's false, we know there are no states in the - # database up until end_time. 
- (end_time and not has_states_before(hass, end_time)) + (end_time and not has_recorder_run_after(hass, end_time)) or not include_start_time_state and entity_ids and not entities_may_have_state_changes_after( diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index a3163d5b396..76cf0a7c05e 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -1430,7 +1430,6 @@ class Recorder(threading.Thread): with session_scope(session=self.get_session()) as session: end_incomplete_runs(session, self.recorder_runs_manager.recording_start) self.recorder_runs_manager.start(session) - self.states_manager.load_from_db(session) self._open_event_session() diff --git a/homeassistant/components/recorder/history/legacy.py b/homeassistant/components/recorder/history/legacy.py index dc49ebb9768..da90b296fe3 100644 --- a/homeassistant/components/recorder/history/legacy.py +++ b/homeassistant/components/recorder/history/legacy.py @@ -22,9 +22,9 @@ from homeassistant.core import HomeAssistant, State, split_entity_id from homeassistant.helpers.recorder import get_instance import homeassistant.util.dt as dt_util -from ..db_schema import StateAttributes, States +from ..db_schema import RecorderRuns, StateAttributes, States from ..filters import Filters -from ..models import process_timestamp_to_utc_isoformat +from ..models import process_timestamp, process_timestamp_to_utc_isoformat from ..models.legacy import LegacyLazyState, legacy_row_to_compressed_state from ..util import execute_stmt_lambda_element, session_scope from .const import ( @@ -436,7 +436,7 @@ def get_last_state_changes( def _get_states_for_entities_stmt( - run_start_ts: float, + run_start: datetime, utc_point_in_time: datetime, entity_ids: list[str], no_attributes: bool, @@ -447,6 +447,7 @@ def _get_states_for_entities_stmt( ) # We got an include-list of entities, accelerate the query by filtering already # in the inner query. 
+ run_start_ts = process_timestamp(run_start).timestamp() utc_point_in_time_ts = utc_point_in_time.timestamp() stmt += lambda q: q.join( ( @@ -482,7 +483,7 @@ def _get_rows_with_session( session: Session, utc_point_in_time: datetime, entity_ids: list[str], - *, + run: RecorderRuns | None = None, no_attributes: bool = False, ) -> Iterable[Row]: """Return the states at a specific point in time.""" @@ -494,16 +495,17 @@ def _get_rows_with_session( ), ) - oldest_ts = get_instance(hass).states_manager.oldest_ts + if run is None: + run = get_instance(hass).recorder_runs_manager.get(utc_point_in_time) - if oldest_ts is None or oldest_ts > utc_point_in_time.timestamp(): - # We don't have any states for the requested time + if run is None or process_timestamp(run.start) > utc_point_in_time: + # History did not run before utc_point_in_time return [] # We have more than one entity to look at so we need to do a query on states # since the last recorder run started. stmt = _get_states_for_entities_stmt( - oldest_ts, utc_point_in_time, entity_ids, no_attributes + run.start, utc_point_in_time, entity_ids, no_attributes ) return execute_stmt_lambda_element(session, stmt) diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index 01551de1f28..9159bbc6181 100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -34,6 +34,7 @@ from ..models import ( LazyState, datetime_to_timestamp_or_none, extract_metadata_ids, + process_timestamp, row_to_compressed_state, ) from ..util import execute_stmt_lambda_element, session_scope @@ -245,9 +246,9 @@ def get_significant_states_with_session( if metadata_id is not None and split_entity_id(entity_id)[0] in SIGNIFICANT_DOMAINS ] - oldest_ts: float | None = None + run_start_ts: float | None = None if include_start_time_state and not ( - oldest_ts := _get_oldest_possible_ts(hass, start_time) + run_start_ts := 
_get_run_start_ts_for_utc_point_in_time(hass, start_time) ): include_start_time_state = False start_time_ts = start_time.timestamp() @@ -263,7 +264,7 @@ def get_significant_states_with_session( significant_changes_only, no_attributes, include_start_time_state, - oldest_ts, + run_start_ts, ), track_on=[ bool(single_metadata_id), @@ -410,9 +411,9 @@ def state_changes_during_period( entity_id_to_metadata_id: dict[str, int | None] = { entity_id: single_metadata_id } - oldest_ts: float | None = None + run_start_ts: float | None = None if include_start_time_state and not ( - oldest_ts := _get_oldest_possible_ts(hass, start_time) + run_start_ts := _get_run_start_ts_for_utc_point_in_time(hass, start_time) ): include_start_time_state = False start_time_ts = start_time.timestamp() @@ -425,7 +426,7 @@ def state_changes_during_period( no_attributes, limit, include_start_time_state, - oldest_ts, + run_start_ts, has_last_reported, ), track_on=[ @@ -599,17 +600,17 @@ def _get_start_time_state_for_entities_stmt( ) -def _get_oldest_possible_ts( +def _get_run_start_ts_for_utc_point_in_time( hass: HomeAssistant, utc_point_in_time: datetime ) -> float | None: - """Return the oldest possible timestamp. - - Returns None if there are no states as old as utc_point_in_time. 
- """ - - oldest_ts = get_instance(hass).states_manager.oldest_ts - if oldest_ts is not None and oldest_ts < utc_point_in_time.timestamp(): - return oldest_ts + """Return the start time of a run.""" + run = get_instance(hass).recorder_runs_manager.get(utc_point_in_time) + if ( + run is not None + and (run_start := process_timestamp(run.start)) < utc_point_in_time + ): + return run_start.timestamp() + # History did not run before utc_point_in_time but we still return None diff --git a/homeassistant/components/recorder/purge.py b/homeassistant/components/recorder/purge.py index 11f5accc978..eb67300e8d4 100644 --- a/homeassistant/components/recorder/purge.py +++ b/homeassistant/components/recorder/purge.py @@ -122,9 +122,6 @@ def purge_old_data( _purge_old_entity_ids(instance, session) _purge_old_recorder_runs(instance, session, purge_before) - with session_scope(session=instance.get_session(), read_only=True) as session: - instance.recorder_runs_manager.load_from_db(session) - instance.states_manager.load_from_db(session) if repack: repack_database(instance) return True diff --git a/homeassistant/components/recorder/queries.py b/homeassistant/components/recorder/queries.py index 8ca7bef2691..2e4b588a0b0 100644 --- a/homeassistant/components/recorder/queries.py +++ b/homeassistant/components/recorder/queries.py @@ -637,15 +637,6 @@ def find_states_to_purge( ) -def find_oldest_state() -> StatementLambdaElement: - """Find the last_updated_ts of the oldest state.""" - return lambda_stmt( - lambda: select(States.last_updated_ts).where( - States.state_id.in_(select(func.min(States.state_id))) - ) - ) - - def find_short_term_statistics_to_purge( purge_before: datetime, max_bind_vars: int ) -> StatementLambdaElement: diff --git a/homeassistant/components/recorder/table_managers/states.py b/homeassistant/components/recorder/table_managers/states.py index fafcfa0ea61..d5cef759c54 100644 --- a/homeassistant/components/recorder/table_managers/states.py +++ 
b/homeassistant/components/recorder/table_managers/states.py @@ -2,15 +2,7 @@ from __future__ import annotations -from collections.abc import Sequence -from typing import Any, cast - -from sqlalchemy.engine.row import Row -from sqlalchemy.orm.session import Session - from ..db_schema import States -from ..queries import find_oldest_state -from ..util import execute_stmt_lambda_element class StatesManager: @@ -21,12 +13,6 @@ class StatesManager: self._pending: dict[str, States] = {} self._last_committed_id: dict[str, int] = {} self._last_reported: dict[int, float] = {} - self._oldest_ts: float | None = None - - @property - def oldest_ts(self) -> float | None: - """Return the oldest timestamp.""" - return self._oldest_ts def pop_pending(self, entity_id: str) -> States | None: """Pop a pending state. @@ -58,8 +44,6 @@ class StatesManager: recorder thread. """ self._pending[entity_id] = state - if self._oldest_ts is None: - self._oldest_ts = state.last_updated_ts def update_pending_last_reported( self, state_id: int, last_reported_timestamp: float @@ -90,22 +74,6 @@ class StatesManager: """ self._last_committed_id.clear() self._pending.clear() - self._oldest_ts = None - - def load_from_db(self, session: Session) -> None: - """Update the cache. - - Must run in the recorder thread. - """ - result = cast( - Sequence[Row[Any]], - execute_stmt_lambda_element(session, find_oldest_state()), - ) - if not result: - ts = None - else: - ts = result[0].last_updated_ts - self._oldest_ts = ts def evict_purged_state_ids(self, purged_state_ids: set[int]) -> None: """Evict purged states from the committed states. 
diff --git a/homeassistant/components/recorder/tasks.py b/homeassistant/components/recorder/tasks.py index fa10c12aa68..783f0a80b8e 100644 --- a/homeassistant/components/recorder/tasks.py +++ b/homeassistant/components/recorder/tasks.py @@ -120,6 +120,8 @@ class PurgeTask(RecorderTask): if purge.purge_old_data( instance, self.purge_before, self.repack, self.apply_filter ): + with instance.get_session() as session: + instance.recorder_runs_manager.load_from_db(session) # We always need to do the db cleanups after a purge # is finished to ensure the WAL checkpoint and other # tasks happen after a vacuum. diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index c3ff5027b70..ea764b14401 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -112,9 +112,6 @@ async def test_purge_big_database(hass: HomeAssistant, recorder_mock: Recorder) async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test deleting old states.""" - assert recorder_mock.states_manager.oldest_ts is None - oldest_ts = recorder_mock.states_manager.oldest_ts - await _add_test_states(hass) # make sure we start with 6 states @@ -130,10 +127,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> events = session.query(Events).filter(Events.event_type == "state_changed") assert events.count() == 0 - assert recorder_mock.states_manager.oldest_ts != oldest_ts - assert recorder_mock.states_manager.oldest_ts == states[0].last_updated_ts - oldest_ts = recorder_mock.states_manager.oldest_ts - assert "test.recorder2" in recorder_mock.states_manager._last_committed_id purge_before = dt_util.utcnow() - timedelta(days=4) @@ -147,8 +140,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> repack=False, ) assert not finished - # states_manager.oldest_ts is not updated until after the purge is complete - assert 
recorder_mock.states_manager.oldest_ts == oldest_ts with session_scope(hass=hass) as session: states = session.query(States) @@ -171,8 +162,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> finished = purge_old_data(recorder_mock, purge_before, repack=False) assert finished - # states_manager.oldest_ts should now be updated - assert recorder_mock.states_manager.oldest_ts != oldest_ts with session_scope(hass=hass) as session: states = session.query(States) @@ -180,10 +169,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> assert states.count() == 2 assert state_attributes.count() == 1 - assert recorder_mock.states_manager.oldest_ts != oldest_ts - assert recorder_mock.states_manager.oldest_ts == states[0].last_updated_ts - oldest_ts = recorder_mock.states_manager.oldest_ts - assert "test.recorder2" in recorder_mock.states_manager._last_committed_id # run purge_old_data again @@ -196,8 +181,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> repack=False, ) assert not finished - # states_manager.oldest_ts is not updated until after the purge is complete - assert recorder_mock.states_manager.oldest_ts == oldest_ts with session_scope(hass=hass) as session: assert states.count() == 0 From 16ad2d52c7bd9ece9a202f236644d92fc0cbe013 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Sun, 15 Dec 2024 13:07:10 +0100 Subject: [PATCH 264/677] Improve MQTT json color_temp validation (#133174) * Improve MQTT json color_temp validation * Revert unrelated changes and assert on logs * Typo --- homeassistant/components/mqtt/light/schema_json.py | 2 +- tests/components/mqtt/test_light_json.py | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/mqtt/light/schema_json.py b/homeassistant/components/mqtt/light/schema_json.py index 5901967610a..5880a684ec0 100644 --- a/homeassistant/components/mqtt/light/schema_json.py +++ 
b/homeassistant/components/mqtt/light/schema_json.py @@ -490,7 +490,7 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): ) except KeyError: pass - except ValueError: + except (TypeError, ValueError): _LOGGER.warning( "Invalid color temp value '%s' received for entity %s", values["color_temp"], diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index b1031bec342..c6032678a47 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -2185,7 +2185,9 @@ async def test_white_scale( ], ) async def test_invalid_values( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test that invalid color/brightness/etc. values are ignored.""" await mqtt_mock_entry() @@ -2287,6 +2289,10 @@ async def test_invalid_values( async_fire_mqtt_message( hass, "test_light_rgb", '{"state":"ON", "color_temp": "badValue"}' ) + assert ( + "Invalid color temp value 'badValue' received for entity light.test" + in caplog.text + ) # Color temperature should not have changed state = hass.states.get("light.test") From c2ee020eee3dde7c532124b74dd9891cb07d6ae1 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Sun, 15 Dec 2024 13:14:32 +0100 Subject: [PATCH 265/677] Update quality scale documentation rules in IronOS integration (#133245) --- .../components/iron_os/quality_scale.yaml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/iron_os/quality_scale.yaml b/homeassistant/components/iron_os/quality_scale.yaml index a379e7965b3..5ede3d6971d 100644 --- a/homeassistant/components/iron_os/quality_scale.yaml +++ b/homeassistant/components/iron_os/quality_scale.yaml @@ -12,9 +12,9 @@ rules: docs-actions: status: done comment: Integration does register actions aside from entity actions 
- docs-high-level-description: todo - docs-installation-instructions: todo - docs-removal-instructions: todo + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done entity-event-setup: status: exempt comment: Integration does not register events. @@ -52,13 +52,13 @@ rules: status: exempt comment: Device is not connected to an ip network. Other information from discovery is immutable and does not require updating. discovery: done - docs-data-update: todo - docs-examples: todo - docs-known-limitations: todo + docs-data-update: done + docs-examples: done + docs-known-limitations: done docs-supported-devices: done docs-supported-functions: done - docs-troubleshooting: todo - docs-use-cases: todo + docs-troubleshooting: done + docs-use-cases: done dynamic-devices: status: exempt comment: Only one device per config entry. New devices are set up as new entries. From b13a54f605dbf1c1c164d2e9140de81e4ad0ead7 Mon Sep 17 00:00:00 2001 From: Dan Raper Date: Sun, 15 Dec 2024 13:22:21 +0000 Subject: [PATCH 266/677] Add button platform to Ohme (#133267) * Add button platform and reauth flow * CI fixes * Test comment change * Remove reauth from this PR * Move is_supported_fn to OhmeEntityDescription * Set parallel updates to 1 * Add coordinator refresh to button press * Add exception handling to button async_press --- homeassistant/components/ohme/button.py | 77 ++++++++++++++++++ homeassistant/components/ohme/const.py | 2 +- homeassistant/components/ohme/entity.py | 12 +++ homeassistant/components/ohme/icons.json | 5 ++ .../components/ohme/quality_scale.yaml | 5 +- homeassistant/components/ohme/sensor.py | 5 +- homeassistant/components/ohme/strings.json | 5 ++ .../ohme/snapshots/test_button.ambr | 47 +++++++++++ tests/components/ohme/test_button.py | 79 +++++++++++++++++++ 9 files changed, 229 insertions(+), 8 deletions(-) create mode 100644 homeassistant/components/ohme/button.py create mode 100644 
tests/components/ohme/snapshots/test_button.ambr create mode 100644 tests/components/ohme/test_button.py diff --git a/homeassistant/components/ohme/button.py b/homeassistant/components/ohme/button.py new file mode 100644 index 00000000000..21792770bb4 --- /dev/null +++ b/homeassistant/components/ohme/button.py @@ -0,0 +1,77 @@ +"""Platform for button.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass + +from ohme import ApiException, ChargerStatus, OhmeApiClient + +from homeassistant.components.button import ButtonEntity, ButtonEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import OhmeConfigEntry +from .const import DOMAIN +from .entity import OhmeEntity, OhmeEntityDescription + +PARALLEL_UPDATES = 1 + + +@dataclass(frozen=True, kw_only=True) +class OhmeButtonDescription(OhmeEntityDescription, ButtonEntityDescription): + """Class describing Ohme button entities.""" + + press_fn: Callable[[OhmeApiClient], Awaitable[None]] + available_fn: Callable[[OhmeApiClient], bool] + + +BUTTON_DESCRIPTIONS = [ + OhmeButtonDescription( + key="approve", + translation_key="approve", + press_fn=lambda client: client.async_approve_charge(), + is_supported_fn=lambda client: client.is_capable("pluginsRequireApprovalMode"), + available_fn=lambda client: client.status is ChargerStatus.PENDING_APPROVAL, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: OhmeConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up buttons.""" + coordinator = config_entry.runtime_data.charge_session_coordinator + + async_add_entities( + OhmeButton(coordinator, description) + for description in BUTTON_DESCRIPTIONS + if description.is_supported_fn(coordinator.client) + ) + + +class OhmeButton(OhmeEntity, ButtonEntity): + 
"""Generic button for Ohme.""" + + entity_description: OhmeButtonDescription + + async def async_press(self) -> None: + """Handle the button press.""" + try: + await self.entity_description.press_fn(self.coordinator.client) + except ApiException as e: + raise HomeAssistantError( + translation_key="api_failed", translation_domain=DOMAIN + ) from e + await self.coordinator.async_request_refresh() + + @property + def available(self) -> bool: + """Is entity available.""" + + return super().available and self.entity_description.available_fn( + self.coordinator.client + ) diff --git a/homeassistant/components/ohme/const.py b/homeassistant/components/ohme/const.py index adc5ddfd61b..b44262ad509 100644 --- a/homeassistant/components/ohme/const.py +++ b/homeassistant/components/ohme/const.py @@ -3,4 +3,4 @@ from homeassistant.const import Platform DOMAIN = "ohme" -PLATFORMS = [Platform.SENSOR] +PLATFORMS = [Platform.BUTTON, Platform.SENSOR] diff --git a/homeassistant/components/ohme/entity.py b/homeassistant/components/ohme/entity.py index 2c662f7fccb..6a7d0ea16e4 100644 --- a/homeassistant/components/ohme/entity.py +++ b/homeassistant/components/ohme/entity.py @@ -1,5 +1,10 @@ """Base class for entities.""" +from collections.abc import Callable +from dataclasses import dataclass + +from ohme import OhmeApiClient + from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -8,6 +13,13 @@ from .const import DOMAIN from .coordinator import OhmeBaseCoordinator +@dataclass(frozen=True) +class OhmeEntityDescription(EntityDescription): + """Class describing Ohme entities.""" + + is_supported_fn: Callable[[OhmeApiClient], bool] = lambda _: True + + class OhmeEntity(CoordinatorEntity[OhmeBaseCoordinator]): """Base class for all Ohme entities.""" diff --git a/homeassistant/components/ohme/icons.json b/homeassistant/components/ohme/icons.json index 
228907b3dbe..d5bf3fa1187 100644 --- a/homeassistant/components/ohme/icons.json +++ b/homeassistant/components/ohme/icons.json @@ -1,5 +1,10 @@ { "entity": { + "button": { + "approve": { + "default": "mdi:check-decagram" + } + }, "sensor": { "status": { "default": "mdi:car", diff --git a/homeassistant/components/ohme/quality_scale.yaml b/homeassistant/components/ohme/quality_scale.yaml index cffc9eb7b82..15697cb11a3 100644 --- a/homeassistant/components/ohme/quality_scale.yaml +++ b/homeassistant/components/ohme/quality_scale.yaml @@ -29,10 +29,7 @@ rules: unique-config-entry: done # Silver - action-exceptions: - status: exempt - comment: | - This integration has no custom actions and read-only platform only. + action-exceptions: done config-entry-unloading: done docs-configuration-parameters: status: exempt diff --git a/homeassistant/components/ohme/sensor.py b/homeassistant/components/ohme/sensor.py index d4abaf85b1f..6d111cf7af6 100644 --- a/homeassistant/components/ohme/sensor.py +++ b/homeassistant/components/ohme/sensor.py @@ -18,17 +18,16 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import OhmeConfigEntry -from .entity import OhmeEntity +from .entity import OhmeEntity, OhmeEntityDescription PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) -class OhmeSensorDescription(SensorEntityDescription): +class OhmeSensorDescription(OhmeEntityDescription, SensorEntityDescription): """Class describing Ohme sensor entities.""" value_fn: Callable[[OhmeApiClient], str | int | float] - is_supported_fn: Callable[[OhmeApiClient], bool] = lambda _: True SENSOR_CHARGE_SESSION = [ diff --git a/homeassistant/components/ohme/strings.json b/homeassistant/components/ohme/strings.json index 06231ed5cf4..42e0a60b83e 100644 --- a/homeassistant/components/ohme/strings.json +++ b/homeassistant/components/ohme/strings.json @@ -22,6 +22,11 @@ } }, "entity": { + "button": { + "approve": { + "name": "Approve charge" + } + }, "sensor": { "status": { "name": "Status", diff --git a/tests/components/ohme/snapshots/test_button.ambr b/tests/components/ohme/snapshots/test_button.ambr new file mode 100644 index 00000000000..32de16208f4 --- /dev/null +++ b/tests/components/ohme/snapshots/test_button.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_buttons[button.ohme_home_pro_approve_charge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.ohme_home_pro_approve_charge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Approve charge', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'approve', + 'unique_id': 'chargerid_approve', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[button.ohme_home_pro_approve_charge-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Ohme Home Pro Approve charge', + }), + 'context': , + 'entity_id': 'button.ohme_home_pro_approve_charge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- diff --git a/tests/components/ohme/test_button.py b/tests/components/ohme/test_button.py new file mode 100644 index 00000000000..1728563b2e9 --- /dev/null +++ b/tests/components/ohme/test_button.py @@ -0,0 +1,79 @@ +"""Tests for sensors.""" + +from datetime import timedelta +from unittest.mock import MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +from ohme import ChargerStatus +from syrupy import SnapshotAssertion + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_buttons( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test the Ohme buttons.""" + with patch("homeassistant.components.ohme.PLATFORMS", [Platform.BUTTON]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_button_available( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test that button shows as unavailable when a charge is not pending approval.""" + mock_client.status = ChargerStatus.PENDING_APPROVAL + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("button.ohme_home_pro_approve_charge") + assert state.state == STATE_UNKNOWN + + mock_client.status = ChargerStatus.PLUGGED_IN + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("button.ohme_home_pro_approve_charge") + assert state.state == STATE_UNAVAILABLE + + +async def test_button_press( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test the button press action.""" + mock_client.status = ChargerStatus.PENDING_APPROVAL + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: "button.ohme_home_pro_approve_charge", + }, + blocking=True, + ) + + assert len(mock_client.async_approve_charge.mock_calls) == 1 From b4b6067e8ee3ec660b893cba734c0f83aa89d211 Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Sun, 15 Dec 2024 14:41:35 +0100 Subject: 
[PATCH 267/677] Use typed BMWConfigEntry (#133272) --- homeassistant/components/bmw_connected_drive/__init__.py | 7 +++---- .../components/bmw_connected_drive/config_flow.py | 4 ++-- .../components/bmw_connected_drive/coordinator.py | 2 +- 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/bmw_connected_drive/__init__.py b/homeassistant/components/bmw_connected_drive/__init__.py index 5ec678b9c95..7b6fb4119db 100644 --- a/homeassistant/components/bmw_connected_drive/__init__.py +++ b/homeassistant/components/bmw_connected_drive/__init__.py @@ -6,7 +6,6 @@ import logging import voluptuous as vol -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DEVICE_ID, CONF_ENTITY_ID, CONF_NAME, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import ( @@ -50,7 +49,7 @@ SERVICE_UPDATE_STATE = "update_state" @callback def _async_migrate_options_from_data_if_missing( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: BMWConfigEntry ) -> None: data = dict(entry.data) options = dict(entry.options) @@ -116,7 +115,7 @@ async def _async_migrate_entries( return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: BMWConfigEntry) -> bool: """Set up BMW Connected Drive from a config entry.""" _async_migrate_options_from_data_if_missing(hass, entry) @@ -164,7 +163,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: BMWConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms( diff --git a/homeassistant/components/bmw_connected_drive/config_flow.py b/homeassistant/components/bmw_connected_drive/config_flow.py index 95fec101c9d..04fb3842dfa 
100644 --- a/homeassistant/components/bmw_connected_drive/config_flow.py +++ b/homeassistant/components/bmw_connected_drive/config_flow.py @@ -18,7 +18,6 @@ import voluptuous as vol from homeassistant.config_entries import ( SOURCE_REAUTH, SOURCE_RECONFIGURE, - ConfigEntry, ConfigFlow, ConfigFlowResult, OptionsFlow, @@ -39,6 +38,7 @@ from .const import ( CONF_READ_ONLY, CONF_REFRESH_TOKEN, ) +from .coordinator import BMWConfigEntry DATA_SCHEMA = vol.Schema( { @@ -224,7 +224,7 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: BMWConfigEntry, ) -> BMWOptionsFlow: """Return a MyBMW option flow.""" return BMWOptionsFlow() diff --git a/homeassistant/components/bmw_connected_drive/coordinator.py b/homeassistant/components/bmw_connected_drive/coordinator.py index 815bf3393e4..b54d9245bbd 100644 --- a/homeassistant/components/bmw_connected_drive/coordinator.py +++ b/homeassistant/components/bmw_connected_drive/coordinator.py @@ -42,7 +42,7 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]): account: MyBMWAccount config_entry: BMWConfigEntry - def __init__(self, hass: HomeAssistant, *, config_entry: ConfigEntry) -> None: + def __init__(self, hass: HomeAssistant, *, config_entry: BMWConfigEntry) -> None: """Initialize account-wide BMW data updater.""" self.account = MyBMWAccount( config_entry.data[CONF_USERNAME], From 95babbef21296faf157f28dd4a10da4398282220 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sun, 15 Dec 2024 17:39:25 +0100 Subject: [PATCH 268/677] Fix two typos in KEF strings (#133294) --- homeassistant/components/kef/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/kef/strings.json b/homeassistant/components/kef/strings.json index e5ffff68162..c8aa644333a 100644 --- a/homeassistant/components/kef/strings.json +++ b/homeassistant/components/kef/strings.json @@ -22,14 +22,14 @@ }, 
"high_pass": { "name": "High pass", - "description": "High-pass mode\"." + "description": "High-pass mode." }, "sub_polarity": { "name": "Subwoofer polarity", "description": "Sub polarity." }, "bass_extension": { - "name": "Base extension", + "name": "Bass extension", "description": "Bass extension." } } From 51422a4502d4e63c388f9332f000f291e6d0283e Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 15 Dec 2024 17:41:43 +0100 Subject: [PATCH 269/677] Bump pynordpool 0.2.3 (#133277) --- homeassistant/components/nordpool/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/nordpool/manifest.json b/homeassistant/components/nordpool/manifest.json index bf093eb3ee9..b3a18eb040a 100644 --- a/homeassistant/components/nordpool/manifest.json +++ b/homeassistant/components/nordpool/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "cloud_polling", "loggers": ["pynordpool"], - "requirements": ["pynordpool==0.2.2"], + "requirements": ["pynordpool==0.2.3"], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index e4b9787c641..cfa3763ce0e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2115,7 +2115,7 @@ pynetio==0.1.9.1 pynobo==1.8.1 # homeassistant.components.nordpool -pynordpool==0.2.2 +pynordpool==0.2.3 # homeassistant.components.nuki pynuki==1.6.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 58f6d599825..d269c63d097 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1714,7 +1714,7 @@ pynetgear==0.10.10 pynobo==1.8.1 # homeassistant.components.nordpool -pynordpool==0.2.2 +pynordpool==0.2.3 # homeassistant.components.nuki pynuki==1.6.3 From 042d4cd39b77511fe76ed7de12055ae721012914 Mon Sep 17 00:00:00 2001 From: Conor Eager Date: Mon, 16 Dec 2024 05:43:21 +1300 Subject: [PATCH 270/677] Bump starlink-grpc-core to 1.2.1 to fix missing ping 
(#133183) --- homeassistant/components/starlink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/starlink/manifest.json b/homeassistant/components/starlink/manifest.json index 070cbf1b44c..15bad3ebc2e 100644 --- a/homeassistant/components/starlink/manifest.json +++ b/homeassistant/components/starlink/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/starlink", "iot_class": "local_polling", - "requirements": ["starlink-grpc-core==1.2.0"] + "requirements": ["starlink-grpc-core==1.2.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index cfa3763ce0e..cd2b0c04544 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2747,7 +2747,7 @@ starline==0.1.5 starlingbank==3.2 # homeassistant.components.starlink -starlink-grpc-core==1.2.0 +starlink-grpc-core==1.2.2 # homeassistant.components.statsd statsd==3.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d269c63d097..6101fe6e41e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2202,7 +2202,7 @@ srpenergy==1.3.6 starline==0.1.5 # homeassistant.components.starlink -starlink-grpc-core==1.2.0 +starlink-grpc-core==1.2.2 # homeassistant.components.statsd statsd==3.2.1 From f069f340a3c0215cf455b07abb43fe707316ae2b Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sun, 15 Dec 2024 08:53:36 -0800 Subject: [PATCH 271/677] Explicitly set `PARALLEL_UPDATES` for Google Tasks (#133296) --- homeassistant/components/google_tasks/quality_scale.yaml | 2 +- homeassistant/components/google_tasks/todo.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/google_tasks/quality_scale.yaml b/homeassistant/components/google_tasks/quality_scale.yaml index 94c81d0b7f8..0cecb88484f 100644 --- a/homeassistant/components/google_tasks/quality_scale.yaml +++ 
b/homeassistant/components/google_tasks/quality_scale.yaml @@ -42,7 +42,7 @@ rules: action-exceptions: done docs-installation-parameters: todo integration-owner: done - parallel-updates: todo + parallel-updates: done test-coverage: status: todo comment: Test coverage for __init__.py is not above 95% yet diff --git a/homeassistant/components/google_tasks/todo.py b/homeassistant/components/google_tasks/todo.py index d749adbfb2b..9a44b91b529 100644 --- a/homeassistant/components/google_tasks/todo.py +++ b/homeassistant/components/google_tasks/todo.py @@ -19,6 +19,7 @@ from homeassistant.util import dt as dt_util from .coordinator import TaskUpdateCoordinator from .types import GoogleTasksConfigEntry +PARALLEL_UPDATES = 0 SCAN_INTERVAL = timedelta(minutes=15) TODO_STATUS_MAP = { From 2a49378f4cb3e808bee83d959aaff9755da044cb Mon Sep 17 00:00:00 2001 From: Tomer Shemesh Date: Sun, 15 Dec 2024 12:27:17 -0500 Subject: [PATCH 272/677] Refactor Onkyo tests to patch underlying pyeiscp library (#132653) * Refactor Onkyo tests to patch underlying pyeiscp library instead of home assistant methods * limit test patches to specific component, move atches into conftest * use patch.multiple and restrict patches to specific component * use side effect instead of mocking method --- tests/components/onkyo/__init__.py | 10 + tests/components/onkyo/conftest.py | 68 ++++- tests/components/onkyo/test_config_flow.py | 273 +++++++++------------ 3 files changed, 179 insertions(+), 172 deletions(-) diff --git a/tests/components/onkyo/__init__.py b/tests/components/onkyo/__init__.py index 8900f189aea..064075d109e 100644 --- a/tests/components/onkyo/__init__.py +++ b/tests/components/onkyo/__init__.py @@ -19,6 +19,16 @@ def create_receiver_info(id: int) -> ReceiverInfo: ) +def create_connection(id: int) -> Mock: + """Create an mock connection object for testing.""" + connection = Mock() + connection.host = f"host {id}" + connection.port = 0 + connection.name = f"type {id}" + connection.identifier 
= f"id{id}" + return connection + + def create_config_entry_from_info(info: ReceiverInfo) -> MockConfigEntry: """Create a config entry from receiver info.""" data = {CONF_HOST: info.host} diff --git a/tests/components/onkyo/conftest.py b/tests/components/onkyo/conftest.py index c37966e3bae..abbe39dd966 100644 --- a/tests/components/onkyo/conftest.py +++ b/tests/components/onkyo/conftest.py @@ -1,25 +1,16 @@ """Configure tests for the Onkyo integration.""" -from collections.abc import Generator -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest from homeassistant.components.onkyo.const import DOMAIN +from . import create_connection + from tests.common import MockConfigEntry -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: - """Override async_setup_entry.""" - with patch( - "homeassistant.components.onkyo.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - yield mock_setup_entry - - @pytest.fixture(name="config_entry") def mock_config_entry() -> MockConfigEntry: """Create Onkyo entry in Home Assistant.""" @@ -28,3 +19,56 @@ def mock_config_entry() -> MockConfigEntry: title="Onkyo", data={}, ) + + +@pytest.fixture(autouse=True) +def patch_timeouts(): + """Patch timeouts to avoid tests waiting.""" + with patch.multiple( + "homeassistant.components.onkyo.receiver", + DEVICE_INTERVIEW_TIMEOUT=0, + DEVICE_DISCOVERY_TIMEOUT=0, + ): + yield + + +@pytest.fixture +async def default_mock_discovery(): + """Mock discovery with a single device.""" + + async def mock_discover(host=None, discovery_callback=None, timeout=0): + await discovery_callback(create_connection(1)) + + with patch( + "homeassistant.components.onkyo.receiver.pyeiscp.Connection.discover", + new=mock_discover, + ): + yield + + +@pytest.fixture +async def stub_mock_discovery(): + """Mock discovery with no devices.""" + + async def mock_discover(host=None, discovery_callback=None, timeout=0): + pass + + with patch( + 
"homeassistant.components.onkyo.receiver.pyeiscp.Connection.discover", + new=mock_discover, + ): + yield + + +@pytest.fixture +async def empty_mock_discovery(): + """Mock discovery with an empty connection.""" + + async def mock_discover(host=None, discovery_callback=None, timeout=0): + await discovery_callback(None) + + with patch( + "homeassistant.components.onkyo.receiver.pyeiscp.Connection.discover", + new=mock_discover, + ): + yield diff --git a/tests/components/onkyo/test_config_flow.py b/tests/components/onkyo/test_config_flow.py index a9d6f072559..1ee0bfdf9c5 100644 --- a/tests/components/onkyo/test_config_flow.py +++ b/tests/components/onkyo/test_config_flow.py @@ -20,12 +20,13 @@ from homeassistant.data_entry_flow import FlowResultType, InvalidData from . import ( create_config_entry_from_info, + create_connection, create_empty_config_entry, create_receiver_info, setup_integration, ) -from tests.common import Mock, MockConfigEntry +from tests.common import MockConfigEntry async def test_user_initial_menu(hass: HomeAssistant) -> None: @@ -40,9 +41,8 @@ async def test_user_initial_menu(hass: HomeAssistant) -> None: assert not set(init_result["menu_options"]) ^ {"manual", "eiscp_discovery"} -async def test_manual_valid_host(hass: HomeAssistant) -> None: +async def test_manual_valid_host(hass: HomeAssistant, default_mock_discovery) -> None: """Test valid host entered.""" - init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, @@ -53,30 +53,17 @@ async def test_manual_valid_host(hass: HomeAssistant) -> None: {"next_step_id": "manual"}, ) - mock_info = Mock() - mock_info.identifier = "mock_id" - mock_info.host = "mock_host" - mock_info.model_name = "mock_model" + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "host 1"}, + ) - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=mock_info, - ): - select_result = 
await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, - ) - - assert select_result["step_id"] == "configure_receiver" - assert ( - select_result["description_placeholders"]["name"] - == "mock_model (mock_host)" - ) + assert select_result["step_id"] == "configure_receiver" + assert select_result["description_placeholders"]["name"] == "type 1 (host 1)" -async def test_manual_invalid_host(hass: HomeAssistant) -> None: +async def test_manual_invalid_host(hass: HomeAssistant, stub_mock_discovery) -> None: """Test invalid host entered.""" - init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, @@ -87,19 +74,18 @@ async def test_manual_invalid_host(hass: HomeAssistant) -> None: {"next_step_id": "manual"}, ) - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", return_value=None - ): - host_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, - ) + host_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) assert host_result["step_id"] == "manual" assert host_result["errors"]["base"] == "cannot_connect" -async def test_manual_valid_host_unexpected_error(hass: HomeAssistant) -> None: +async def test_manual_valid_host_unexpected_error( + hass: HomeAssistant, empty_mock_discovery +) -> None: """Test valid host entered.""" init_result = await hass.config_entries.flow.async_init( @@ -112,55 +98,49 @@ async def test_manual_valid_host_unexpected_error(hass: HomeAssistant) -> None: {"next_step_id": "manual"}, ) - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - side_effect=Exception(), - ): - host_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, - ) + host_result = await 
hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) assert host_result["step_id"] == "manual" assert host_result["errors"]["base"] == "unknown" -async def test_discovery_and_no_devices_discovered(hass: HomeAssistant) -> None: +async def test_discovery_and_no_devices_discovered( + hass: HomeAssistant, stub_mock_discovery +) -> None: """Test initial menu.""" init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, ) - with patch( - "homeassistant.components.onkyo.config_flow.async_discover", return_value=[] - ): - form_result = await hass.config_entries.flow.async_configure( - init_result["flow_id"], - {"next_step_id": "eiscp_discovery"}, - ) + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "eiscp_discovery"}, + ) - assert form_result["type"] is FlowResultType.ABORT - assert form_result["reason"] == "no_devices_found" + assert form_result["type"] is FlowResultType.ABORT + assert form_result["reason"] == "no_devices_found" -async def test_discovery_with_exception(hass: HomeAssistant) -> None: +async def test_discovery_with_exception( + hass: HomeAssistant, empty_mock_discovery +) -> None: """Test discovery which throws an unexpected exception.""" init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, ) - with patch( - "homeassistant.components.onkyo.config_flow.async_discover", - side_effect=Exception(), - ): - form_result = await hass.config_entries.flow.async_configure( - init_result["flow_id"], - {"next_step_id": "eiscp_discovery"}, - ) - assert form_result["type"] is FlowResultType.ABORT - assert form_result["reason"] == "unknown" + form_result = await hass.config_entries.flow.async_configure( + init_result["flow_id"], + {"next_step_id": "eiscp_discovery"}, + ) + + assert form_result["type"] is FlowResultType.ABORT + assert form_result["reason"] == "unknown" 
async def test_discovery_with_new_and_existing_found(hass: HomeAssistant) -> None: @@ -170,13 +150,12 @@ async def test_discovery_with_new_and_existing_found(hass: HomeAssistant) -> Non context={"source": SOURCE_USER}, ) - infos = [create_receiver_info(1), create_receiver_info(2)] + async def mock_discover(discovery_callback, timeout): + await discovery_callback(create_connection(1)) + await discovery_callback(create_connection(2)) with ( - patch( - "homeassistant.components.onkyo.config_flow.async_discover", - return_value=infos, - ), + patch("pyeiscp.Connection.discover", new=mock_discover), # Fake it like the first entry was already added patch.object(OnkyoConfigFlow, "_async_current_ids", return_value=["id1"]), ): @@ -185,12 +164,12 @@ async def test_discovery_with_new_and_existing_found(hass: HomeAssistant) -> Non {"next_step_id": "eiscp_discovery"}, ) - assert form_result["type"] is FlowResultType.FORM + assert form_result["type"] is FlowResultType.FORM - assert form_result["data_schema"] is not None - schema = form_result["data_schema"].schema - container = schema["device"].container - assert container == {"id2": "type 2 (host 2)"} + assert form_result["data_schema"] is not None + schema = form_result["data_schema"].schema + container = schema["device"].container + assert container == {"id2": "type 2 (host 2)"} async def test_discovery_with_one_selected(hass: HomeAssistant) -> None: @@ -200,14 +179,11 @@ async def test_discovery_with_one_selected(hass: HomeAssistant) -> None: context={"source": SOURCE_USER}, ) - infos = [create_receiver_info(42), create_receiver_info(0)] + async def mock_discover(discovery_callback, timeout): + await discovery_callback(create_connection(42)) + await discovery_callback(create_connection(0)) - with ( - patch( - "homeassistant.components.onkyo.config_flow.async_discover", - return_value=infos, - ), - ): + with patch("pyeiscp.Connection.discover", new=mock_discover): form_result = await hass.config_entries.flow.async_configure( 
init_result["flow_id"], {"next_step_id": "eiscp_discovery"}, @@ -218,11 +194,13 @@ async def test_discovery_with_one_selected(hass: HomeAssistant) -> None: user_input={"device": "id42"}, ) - assert select_result["step_id"] == "configure_receiver" - assert select_result["description_placeholders"]["name"] == "type 42 (host 42)" + assert select_result["step_id"] == "configure_receiver" + assert select_result["description_placeholders"]["name"] == "type 42 (host 42)" -async def test_configure_empty_source_list(hass: HomeAssistant) -> None: +async def test_configure_empty_source_list( + hass: HomeAssistant, default_mock_discovery +) -> None: """Test receiver configuration with no sources set.""" init_result = await hass.config_entries.flow.async_init( @@ -235,29 +213,22 @@ async def test_configure_empty_source_list(hass: HomeAssistant) -> None: {"next_step_id": "manual"}, ) - mock_info = Mock() - mock_info.identifier = "mock_id" + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=mock_info, - ): - select_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, - ) + configure_result = await hass.config_entries.flow.async_configure( + select_result["flow_id"], + user_input={"volume_resolution": 200, "input_sources": []}, + ) - configure_result = await hass.config_entries.flow.async_configure( - select_result["flow_id"], - user_input={"volume_resolution": 200, "input_sources": []}, - ) - - assert configure_result["errors"] == { - "input_sources": "empty_input_source_list" - } + assert configure_result["errors"] == {"input_sources": "empty_input_source_list"} -async def test_configure_no_resolution(hass: HomeAssistant) -> None: +async def test_configure_no_resolution( + hass: HomeAssistant, default_mock_discovery +) -> 
None: """Test receiver configure with no resolution set.""" init_result = await hass.config_entries.flow.async_init( @@ -270,26 +241,21 @@ async def test_configure_no_resolution(hass: HomeAssistant) -> None: {"next_step_id": "manual"}, ) - mock_info = Mock() - mock_info.identifier = "mock_id" + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=mock_info, - ): - select_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, + with pytest.raises(InvalidData): + await hass.config_entries.flow.async_configure( + select_result["flow_id"], + user_input={"input_sources": ["TV"]}, ) - with pytest.raises(InvalidData): - await hass.config_entries.flow.async_configure( - select_result["flow_id"], - user_input={"input_sources": ["TV"]}, - ) - -async def test_configure_resolution_set(hass: HomeAssistant) -> None: +async def test_configure_resolution_set( + hass: HomeAssistant, default_mock_discovery +) -> None: """Test receiver configure with specified resolution.""" init_result = await hass.config_entries.flow.async_init( @@ -302,16 +268,10 @@ async def test_configure_resolution_set(hass: HomeAssistant) -> None: {"next_step_id": "manual"}, ) - receiver_info = create_receiver_info(1) - - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=receiver_info, - ): - select_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, - ) + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) configure_result = await hass.config_entries.flow.async_configure( select_result["flow_id"], @@ -322,7 +282,9 @@ async def 
test_configure_resolution_set(hass: HomeAssistant) -> None: assert configure_result["options"]["volume_resolution"] == 200 -async def test_configure_invalid_resolution_set(hass: HomeAssistant) -> None: +async def test_configure_invalid_resolution_set( + hass: HomeAssistant, default_mock_discovery +) -> None: """Test receiver configure with invalid resolution.""" init_result = await hass.config_entries.flow.async_init( @@ -335,26 +297,19 @@ async def test_configure_invalid_resolution_set(hass: HomeAssistant) -> None: {"next_step_id": "manual"}, ) - mock_info = Mock() - mock_info.identifier = "mock_id" + select_result = await hass.config_entries.flow.async_configure( + form_result["flow_id"], + user_input={CONF_HOST: "sample-host-name"}, + ) - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=mock_info, - ): - select_result = await hass.config_entries.flow.async_configure( - form_result["flow_id"], - user_input={CONF_HOST: "sample-host-name"}, + with pytest.raises(InvalidData): + await hass.config_entries.flow.async_configure( + select_result["flow_id"], + user_input={"volume_resolution": 42, "input_sources": ["TV"]}, ) - with pytest.raises(InvalidData): - await hass.config_entries.flow.async_configure( - select_result["flow_id"], - user_input={"volume_resolution": 42, "input_sources": ["TV"]}, - ) - -async def test_reconfigure(hass: HomeAssistant) -> None: +async def test_reconfigure(hass: HomeAssistant, default_mock_discovery) -> None: """Test the reconfigure config flow.""" receiver_info = create_receiver_info(1) config_entry = create_config_entry_from_info(receiver_info) @@ -368,14 +323,10 @@ async def test_reconfigure(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "manual" - with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=receiver_info, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], 
user_input={"host": receiver_info.host} - ) - await hass.async_block_till_done() + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"host": receiver_info.host} + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "configure_receiver" @@ -403,14 +354,18 @@ async def test_reconfigure_new_device(hass: HomeAssistant) -> None: result = await config_entry.start_reconfigure_flow(hass) - receiver_info_2 = create_receiver_info(2) + mock_connection = create_connection(2) + + # Create mock discover that calls callback immediately + async def mock_discover(host, discovery_callback, timeout): + await discovery_callback(mock_connection) with patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=receiver_info_2, + "homeassistant.components.onkyo.receiver.pyeiscp.Connection.discover", + new=mock_discover, ): result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={"host": receiver_info_2.host} + result["flow_id"], user_input={"host": mock_connection.host} ) await hass.async_block_till_done() @@ -455,12 +410,10 @@ async def test_import_fail( error: str, ) -> None: """Test import flow failed.""" - with ( - patch( - "homeassistant.components.onkyo.config_flow.async_interview", - return_value=None, - side_effect=exception, - ), + + with patch( + "homeassistant.components.onkyo.receiver.pyeiscp.Connection.discover", + side_effect=exception, ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=user_input From e9515111323194e9c83f21d856fa8a3d647c0450 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Sun, 15 Dec 2024 19:26:46 +0100 Subject: [PATCH 273/677] Allow load_verify_locations with only cadata passed (#133299) --- homeassistant/block_async_io.py | 8 +++++++- tests/test_block_async_io.py | 6 ++++++ 2 files 
changed, 13 insertions(+), 1 deletion(-) diff --git a/homeassistant/block_async_io.py b/homeassistant/block_async_io.py index 7a68b2515e9..767716dbe27 100644 --- a/homeassistant/block_async_io.py +++ b/homeassistant/block_async_io.py @@ -50,6 +50,12 @@ def _check_sleep_call_allowed(mapped_args: dict[str, Any]) -> bool: return False +def _check_load_verify_locations_call_allowed(mapped_args: dict[str, Any]) -> bool: + # If only cadata is passed, we can ignore it + kwargs = mapped_args.get("kwargs") + return bool(kwargs and len(kwargs) == 1 and "cadata" in kwargs) + + @dataclass(slots=True, frozen=True) class BlockingCall: """Class to hold information about a blocking call.""" @@ -158,7 +164,7 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = ( original_func=SSLContext.load_verify_locations, object=SSLContext, function="load_verify_locations", - check_allowed=None, + check_allowed=_check_load_verify_locations_call_allowed, strict=False, strict_core=False, skip_for_tests=True, diff --git a/tests/test_block_async_io.py b/tests/test_block_async_io.py index dc2b096f595..dd23d4e9709 100644 --- a/tests/test_block_async_io.py +++ b/tests/test_block_async_io.py @@ -429,6 +429,12 @@ async def test_protect_loop_load_verify_locations( context.load_verify_locations("/dev/null") assert "Detected blocking call to load_verify_locations" in caplog.text + # ignore with only cadata + caplog.clear() + with pytest.raises(ssl.SSLError): + context.load_verify_locations(cadata="xxx") + assert "Detected blocking call to load_verify_locations" not in caplog.text + async def test_protect_loop_load_cert_chain( hass: HomeAssistant, caplog: pytest.LogCaptureFixture From 6d6445bfcffa2ca474c379d2e9a66564a99cff1e Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 15 Dec 2024 19:28:10 +0100 Subject: [PATCH 274/677] Update quality scale for Nord Pool (#133282) --- homeassistant/components/nordpool/quality_scale.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/homeassistant/components/nordpool/quality_scale.yaml b/homeassistant/components/nordpool/quality_scale.yaml index 79d5ac0ecea..dada1115715 100644 --- a/homeassistant/components/nordpool/quality_scale.yaml +++ b/homeassistant/components/nordpool/quality_scale.yaml @@ -86,7 +86,7 @@ rules: docs-supported-functions: done docs-data-update: done docs-known-limitations: done - docs-troubleshooting: todo + docs-troubleshooting: done docs-examples: done # Platinum From e81add5a065741bc9c61a7bc0fefbf1acdc1c9fd Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 15 Dec 2024 12:28:29 -0600 Subject: [PATCH 275/677] Set code_arm_required to False for homekit_controller (#133284) --- .../components/homekit_controller/alarm_control_panel.py | 1 + tests/components/homekit_controller/snapshots/test_init.ambr | 4 ++-- .../components/homekit_controller/test_alarm_control_panel.py | 2 ++ 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/homekit_controller/alarm_control_panel.py b/homeassistant/components/homekit_controller/alarm_control_panel.py index 3cb80f2c817..b17f122dfa5 100644 --- a/homeassistant/components/homekit_controller/alarm_control_panel.py +++ b/homeassistant/components/homekit_controller/alarm_control_panel.py @@ -69,6 +69,7 @@ class HomeKitAlarmControlPanelEntity(HomeKitEntity, AlarmControlPanelEntity): | AlarmControlPanelEntityFeature.ARM_AWAY | AlarmControlPanelEntityFeature.ARM_NIGHT ) + _attr_code_arm_required = False def get_characteristic_types(self) -> list[str]: """Define the homekit characteristics the entity cares about.""" diff --git a/tests/components/homekit_controller/snapshots/test_init.ambr b/tests/components/homekit_controller/snapshots/test_init.ambr index b96da507adf..2bd5e7faf75 100644 --- a/tests/components/homekit_controller/snapshots/test_init.ambr +++ b/tests/components/homekit_controller/snapshots/test_init.ambr @@ -1474,7 +1474,7 @@ 'state': dict({ 'attributes': dict({ 'changed_by': None, - 
'code_arm_required': True, + 'code_arm_required': False, 'code_format': None, 'friendly_name': 'Aqara-Hub-E1-00A0 Security System', 'supported_features': , @@ -1848,7 +1848,7 @@ 'state': dict({ 'attributes': dict({ 'changed_by': None, - 'code_arm_required': True, + 'code_arm_required': False, 'code_format': None, 'friendly_name': 'Aqara Hub-1563 Security System', 'supported_features': , diff --git a/tests/components/homekit_controller/test_alarm_control_panel.py b/tests/components/homekit_controller/test_alarm_control_panel.py index 1e9f023fc46..3ab9dc82e41 100644 --- a/tests/components/homekit_controller/test_alarm_control_panel.py +++ b/tests/components/homekit_controller/test_alarm_control_panel.py @@ -6,6 +6,7 @@ from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes +from homeassistant.components.alarm_control_panel import ATTR_CODE_ARM_REQUIRED from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -106,6 +107,7 @@ async def test_switch_read_alarm_state( state = await helper.poll_and_get_state() assert state.state == "armed_home" assert state.attributes["battery_level"] == 50 + assert state.attributes[ATTR_CODE_ARM_REQUIRED] is False await helper.async_update( ServicesTypes.SECURITY_SYSTEM, From 9e8a158c891b424c7df0c70a3c4a737c90e2fb26 Mon Sep 17 00:00:00 2001 From: Bouwe Westerdijk <11290930+bouwew@users.noreply.github.com> Date: Sun, 15 Dec 2024 19:35:36 +0100 Subject: [PATCH 276/677] Bump plugwise to v1.6.4 and adapt (#133293) --- homeassistant/components/plugwise/climate.py | 10 ---------- homeassistant/components/plugwise/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../fixtures/anna_heatpump_heating/all_data.json | 1 + .../plugwise/fixtures/legacy_anna/all_data.json | 1 + .../plugwise/fixtures/m_adam_cooling/all_data.json | 4 ++-- 
.../plugwise/fixtures/m_adam_jip/all_data.json | 1 - .../fixtures/m_anna_heatpump_cooling/all_data.json | 1 + .../fixtures/m_anna_heatpump_idle/all_data.json | 1 + 10 files changed, 9 insertions(+), 16 deletions(-) diff --git a/homeassistant/components/plugwise/climate.py b/homeassistant/components/plugwise/climate.py index 3cf536eb445..3caed1e7bc2 100644 --- a/homeassistant/components/plugwise/climate.py +++ b/homeassistant/components/plugwise/climate.py @@ -188,19 +188,9 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): """Return the current running hvac operation if supported.""" # Keep track of the previous action-mode self._previous_action_mode(self.coordinator) - - # Adam provides the hvac_action for each thermostat if (action := self.device.get("control_state")) is not None: return HVACAction(action) - # Anna - heater: str = self._gateway["heater_id"] - heater_data = self._devices[heater] - if heater_data["binary_sensors"]["heating_state"]: - return HVACAction.HEATING - if heater_data["binary_sensors"].get("cooling_state", False): - return HVACAction.COOLING - return HVACAction.IDLE @property diff --git a/homeassistant/components/plugwise/manifest.json b/homeassistant/components/plugwise/manifest.json index 60de4496779..80f5be974e1 100644 --- a/homeassistant/components/plugwise/manifest.json +++ b/homeassistant/components/plugwise/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["plugwise"], - "requirements": ["plugwise==1.6.3"], + "requirements": ["plugwise==1.6.4"], "zeroconf": ["_plugwise._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index cd2b0c04544..9ffc6a8f16e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1632,7 +1632,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.6.3 +plugwise==1.6.4 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 diff --git a/requirements_test_all.txt 
b/requirements_test_all.txt index 6101fe6e41e..25c4167a0bf 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1345,7 +1345,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.6.3 +plugwise==1.6.4 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 diff --git a/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json b/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json index 5fc2a114b2f..3a54c3fb9a2 100644 --- a/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json +++ b/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json @@ -62,6 +62,7 @@ "active_preset": "home", "available_schedules": ["standaard", "off"], "climate_mode": "auto", + "control_state": "heating", "dev_class": "thermostat", "firmware": "2018-02-08T11:15:53+01:00", "hardware": "6539-1301-5002", diff --git a/tests/components/plugwise/fixtures/legacy_anna/all_data.json b/tests/components/plugwise/fixtures/legacy_anna/all_data.json index 2cb439950af..9275b82cde9 100644 --- a/tests/components/plugwise/fixtures/legacy_anna/all_data.json +++ b/tests/components/plugwise/fixtures/legacy_anna/all_data.json @@ -37,6 +37,7 @@ "0d266432d64443e283b5d708ae98b455": { "active_preset": "home", "climate_mode": "heat", + "control_state": "heating", "dev_class": "thermostat", "firmware": "2017-03-13T11:54:58+01:00", "hardware": "6539-1301-500", diff --git a/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json b/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json index c5afd68bed5..af6d4b83380 100644 --- a/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json @@ -176,8 +176,8 @@ "Weekschema", "off" ], - "climate_mode": "cool", - "control_state": "idle", + "climate_mode": "auto", + "control_state": "cooling", "dev_class": "climate", "model": "ThermoZone", "name": "Bathroom", diff 
--git a/tests/components/plugwise/fixtures/m_adam_jip/all_data.json b/tests/components/plugwise/fixtures/m_adam_jip/all_data.json index 1ca9e77010f..1a3ef66c147 100644 --- a/tests/components/plugwise/fixtures/m_adam_jip/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_jip/all_data.json @@ -3,7 +3,6 @@ "06aecb3d00354375924f50c47af36bd2": { "active_preset": "no_frost", "climate_mode": "off", - "control_state": "idle", "dev_class": "climate", "model": "ThermoZone", "name": "Slaapkamer", diff --git a/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json b/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json index 74f20379d68..eaa42facf10 100644 --- a/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json +++ b/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json @@ -62,6 +62,7 @@ "active_preset": "home", "available_schedules": ["standaard", "off"], "climate_mode": "auto", + "control_state": "cooling", "dev_class": "thermostat", "firmware": "2018-02-08T11:15:53+01:00", "hardware": "6539-1301-5002", diff --git a/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json b/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json index 3b1e9bf8cac..52645b0f317 100644 --- a/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json +++ b/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json @@ -62,6 +62,7 @@ "active_preset": "home", "available_schedules": ["standaard", "off"], "climate_mode": "auto", + "control_state": "idle", "dev_class": "thermostat", "firmware": "2018-02-08T11:15:53+01:00", "hardware": "6539-1301-5002", From 544ebcf310a0663c62373faca0bfabcc2a50b83a Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sun, 15 Dec 2024 19:35:50 +0100 Subject: [PATCH 277/677] Fix typo "configurered" in MQTT (#133295) --- homeassistant/components/mqtt/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/homeassistant/components/mqtt/strings.json b/homeassistant/components/mqtt/strings.json index c062c111487..3b337c05d2a 100644 --- a/homeassistant/components/mqtt/strings.json +++ b/homeassistant/components/mqtt/strings.json @@ -115,7 +115,7 @@ "bad_ws_headers": "Supply valid HTTP headers as a JSON object", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "invalid_inclusion": "The client certificate and private key must be configurered together" + "invalid_inclusion": "The client certificate and private key must be configured together" } }, "device_automation": { From be6ed05aa220c47d37bd54f1af21759cff8b49e2 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Sun, 15 Dec 2024 19:40:51 +0100 Subject: [PATCH 278/677] Improve Fronius tests (#132872) --- tests/components/fronius/__init__.py | 27 +- .../fronius/snapshots/test_sensor.ambr | 9024 +++++++++++++++++ tests/components/fronius/test_config_flow.py | 184 +- tests/components/fronius/test_coordinator.py | 12 +- tests/components/fronius/test_init.py | 24 +- tests/components/fronius/test_sensor.py | 260 +- 6 files changed, 9132 insertions(+), 399 deletions(-) create mode 100644 tests/components/fronius/snapshots/test_sensor.ambr diff --git a/tests/components/fronius/__init__.py b/tests/components/fronius/__init__.py index 57b22490ed0..8445e6b6a79 100644 --- a/tests/components/fronius/__init__.py +++ b/tests/components/fronius/__init__.py @@ -3,20 +3,16 @@ from __future__ import annotations from collections.abc import Callable -from datetime import timedelta import json from typing import Any -from freezegun.api import FrozenDateTimeFactory - from homeassistant.components.fronius.const import DOMAIN from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from 
homeassistant.helpers.typing import UNDEFINED, UndefinedType -from tests.common import MockConfigEntry, async_fire_time_changed, load_fixture +from tests.common import MockConfigEntry, load_fixture from tests.test_util.aiohttp import AiohttpClientMocker MOCK_HOST = "http://fronius" @@ -115,24 +111,3 @@ def mock_responses( f"{host}/solar_api/v1/GetOhmPilotRealtimeData.cgi?Scope=System", text=_load(f"{fixture_set}/GetOhmPilotRealtimeData.json", "fronius"), ) - - -async def enable_all_entities( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - config_entry_id: str, - time_till_next_update: timedelta, -) -> None: - """Enable all entities for a config entry and fast forward time to receive data.""" - registry = er.async_get(hass) - entities = er.async_entries_for_config_entry(registry, config_entry_id) - for entry in [ - entry - for entry in entities - if entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION - ]: - registry.async_update_entity(entry.entity_id, disabled_by=None) - await hass.async_block_till_done() - freezer.tick(time_till_next_update) - async_fire_time_changed(hass) - await hass.async_block_till_done() diff --git a/tests/components/fronius/snapshots/test_sensor.ambr b/tests/components/fronius/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..700c09da2f6 --- /dev/null +++ b/tests/components/fronius/snapshots/test_sensor.ambr @@ -0,0 +1,9024 @@ +# serializer version: 1 +# name: test_gen24[sensor.inverter_name_ac_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_ac_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 
'original_name': 'AC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac', + 'unique_id': '12345678-current_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_ac_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Inverter name AC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_ac_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.1589', + }) +# --- +# name: test_gen24[sensor.inverter_name_ac_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_ac_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_ac', + 'unique_id': '12345678-power_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_ac_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter name AC power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_ac_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37.3204', + }) +# --- +# name: test_gen24[sensor.inverter_name_ac_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_ac_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac', + 'unique_id': '12345678-voltage_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_ac_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Inverter name AC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_ac_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '234.9168', + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_dc_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc', + 'unique_id': '12345678-current_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Inverter name DC current', + 'state_class': , + 'unit_of_measurement': 
, + }), + 'context': , + 'entity_id': 'sensor.inverter_name_dc_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0783', + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_current_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_dc_current_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc_2', + 'unique_id': '12345678-current_dc_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_current_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Inverter name DC current 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_dc_current_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0754', + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_dc_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage', + 'platform': 'fronius', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc', + 'unique_id': '12345678-voltage_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Inverter name DC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_dc_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '411.3811', + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_voltage_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_dc_voltage_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc_2', + 'unique_id': '12345678-voltage_dc_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_dc_voltage_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Inverter name DC voltage 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_dc_voltage_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '403.4312', + }) +# --- +# name: test_gen24[sensor.inverter_name_error_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.inverter_name_error_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Error code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_code', + 'unique_id': '12345678-error_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.inverter_name_error_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inverter name Error code', + }), + 'context': , + 'entity_id': 'sensor.inverter_name_error_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24[sensor.inverter_name_frequency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_frequency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frequency_ac', + 'unique_id': '12345678-frequency_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_frequency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Inverter name Frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_frequency', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '49.9917', + }) +# --- +# name: test_gen24[sensor.inverter_name_inverter_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.inverter_name_inverter_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Inverter state', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'inverter_state', + 'unique_id': '12345678-inverter_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.inverter_name_inverter_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inverter name Inverter state', + }), + 'context': , + 'entity_id': 'sensor.inverter_name_inverter_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Running', + }) +# --- +# name: test_gen24[sensor.inverter_name_status_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.inverter_name_status_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_code', + 'unique_id': '12345678-status_code', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_gen24[sensor.inverter_name_status_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Inverter name Status code', + }), + 'context': , + 'entity_id': 'sensor.inverter_name_status_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_gen24[sensor.inverter_name_status_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + 'unknown', + 'invalid', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.inverter_name_status_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_message', + 'unique_id': '12345678-status_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.inverter_name_status_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Inverter name Status message', + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + 'unknown', + 'invalid', + ]), + }), + 'context': , + 'entity_id': 'sensor.inverter_name_status_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'running', + }) +# --- +# name: test_gen24[sensor.inverter_name_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': 
None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_name_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': '12345678-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.inverter_name_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Inverter name Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_name_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1530193.42', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent', + 'unique_id': '1234567890-power_apparent', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 
65A-3 Apparent power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '868.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent_phase_1', + 'unique_id': '1234567890-power_apparent_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '243.3', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_2', + 'has_entity_name': 
True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent_phase_2', + 'unique_id': '1234567890-power_apparent_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '323.4', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent_phase_3', + 'unique_id': '1234567890-power_apparent_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_apparent_power_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power phase 3', + 
'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '301.2', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_current_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac_phase_1', + 'unique_id': '1234567890-current_ac_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_current_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Smart Meter TS 65A-3 Current phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.145', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_current_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac_phase_2', + 'unique_id': '1234567890-current_ac_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_current_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Smart Meter TS 65A-3 Current phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.33', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_current_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac_phase_3', + 'unique_id': '1234567890-current_ac_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_current_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Smart Meter TS 65A-3 Current phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': 
, + 'state': '1.825', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_frequency_phase_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_frequency_phase_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency phase average', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frequency_phase_average', + 'unique_id': '1234567890-frequency_phase_average', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_frequency_phase_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Smart Meter TS 65A-3 Frequency phase average', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_frequency_phase_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '49.9', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_meter_location-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter location', + 'platform': 'fronius', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_location', + 'unique_id': '1234567890-meter_location', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_meter_location-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smart Meter TS 65A-3 Meter location', + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_meter_location_description-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'feed_in', + 'consumption_path', + 'external_generator', + 'external_battery', + 'subload', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location_description', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Meter location description', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_location_description', + 'unique_id': '1234567890-meter_location_description', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_meter_location_description-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Smart Meter TS 65A-3 Meter location description', + 'options': list([ + 'feed_in', + 'consumption_path', + 'external_generator', + 'external_battery', + 'subload', + ]), + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location_description', + 'last_changed': , + 'last_reported': , 
+ 'last_updated': , + 'state': 'feed_in', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor', + 'unique_id': '1234567890-power_factor', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.828', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, 
+ 'translation_key': 'power_factor_phase_1', + 'unique_id': '1234567890-power_factor_phase_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor phase 1', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.441', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor_phase_2', + 'unique_id': '1234567890-power_factor_phase_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor phase 2', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.934', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor_phase_3', + 'unique_id': '1234567890-power_factor_phase_3', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_power_factor_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor phase 3', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.832', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_energy_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reactive energy consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_reactive_ac_consumed', + 'unique_id': '1234567890-energy_reactive_ac_consumed', + 
'unit_of_measurement': 'varh', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_energy_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smart Meter TS 65A-3 Reactive energy consumed', + 'state_class': , + 'unit_of_measurement': 'varh', + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '88221.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_energy_produced-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_produced', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reactive energy produced', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_reactive_ac_produced', + 'unique_id': '1234567890-energy_reactive_ac_produced', + 'unit_of_measurement': 'varh', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_energy_produced-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smart Meter TS 65A-3 Reactive energy produced', + 'state_class': , + 'unit_of_measurement': 'varh', + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_produced', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1989125.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 
, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive', + 'unique_id': '1234567890-power_reactive', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-517.4', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive_phase_1', + 'unique_id': '1234567890-power_reactive_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: 
test_gen24[sensor.smart_meter_ts_65a_3_reactive_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-218.6', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive_phase_2', + 'unique_id': '1234567890-power_reactive_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-132.8', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive_phase_3', + 'unique_id': '1234567890-power_reactive_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_reactive_power_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-166.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_consumed', + 'unique_id': '1234567890-energy_real_consumed', + 'unit_of_measurement': , + }) +# --- +# name: 
test_gen24[sensor.smart_meter_ts_65a_3_real_energy_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy consumed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2013105.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_minus-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_minus', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy minus', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_ac_minus', + 'unique_id': '1234567890-energy_real_ac_minus', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_minus-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy minus', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_minus', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3863340.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_plus-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_plus', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy plus', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_ac_plus', + 'unique_id': '1234567890-energy_real_ac_plus', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_plus-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy plus', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_plus', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2013105.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_produced-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_produced', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy produced', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_produced', + 'unique_id': '1234567890-energy_real_produced', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_energy_produced-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ 
+ 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy produced', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_produced', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3863340.0', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real', + 'unique_id': '1234567890-power_real', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '653.1', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_phase_1', + 'unique_id': '1234567890-power_real_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '106.8', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_phase_2', + 'unique_id': '1234567890-power_real_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.smart_meter_ts_65a_3_real_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '294.9', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_phase_3', + 'unique_id': '1234567890-power_real_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_real_power_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '251.3', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 
'original_icon': None, + 'original_name': 'Voltage phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_1', + 'unique_id': '1234567890-voltage_ac_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '235.9', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 1-2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_to_phase_12', + 'unique_id': '1234567890-voltage_ac_phase_to_phase_12', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 1-2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '408.7', + }) +# 
--- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_2', + 'unique_id': '1234567890-voltage_ac_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '236.1', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_2_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 2-3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'voltage_ac_phase_to_phase_23', + 'unique_id': '1234567890-voltage_ac_phase_to_phase_23', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_2_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 2-3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '409.6', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_3', + 'unique_id': '1234567890-voltage_ac_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '236.9', + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_3_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': 
None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 3-1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_to_phase_31', + 'unique_id': '1234567890-voltage_ac_phase_to_phase_31', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.smart_meter_ts_65a_3_voltage_phase_3_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 3-1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '409.4', + }) +# --- +# name: test_gen24[sensor.solarnet_meter_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.solarnet_meter_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter mode', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_mode', + 'unique_id': 'solar_net_123.4567890-power_flow-meter_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.solarnet_meter_mode-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Meter mode', + }), + 'context': , + 'entity_id': 'sensor.solarnet_meter_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'meter', + }) +# --- +# name: test_gen24[sensor.solarnet_power_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid', + 'unique_id': 'solar_net_123.4567890-power_flow-power_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '658.4', + }) +# --- +# name: test_gen24[sensor.solarnet_power_grid_export-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid_export', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, 
+ 'original_name': 'Power grid export', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid_export', + 'unique_id': 'solar_net_123.4567890-power_flow-power_grid_export', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_grid_export-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid export', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid_export', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24[sensor.solarnet_power_grid_import-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid_import', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid import', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid_import', + 'unique_id': 'solar_net_123.4567890-power_flow-power_grid_import', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_grid_import-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid import', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid_import', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '658.4', + }) +# --- +# name: test_gen24[sensor.solarnet_power_load-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load', + 'unique_id': 'solar_net_123.4567890-power_flow-power_load', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_load-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-695.6827', + }) +# --- +# name: test_gen24[sensor.solarnet_power_load_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load_consumed', + 'unique_id': 'solar_net_123.4567890-power_flow-power_load_consumed', + 'unit_of_measurement': , + }) +# --- +# name: 
test_gen24[sensor.solarnet_power_load_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load consumed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '695.6827', + }) +# --- +# name: test_gen24[sensor.solarnet_power_load_generated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load_generated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load generated', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load_generated', + 'unique_id': 'solar_net_123.4567890-power_flow-power_load_generated', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_load_generated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load generated', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load_generated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24[sensor.solarnet_power_photovoltaics-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.solarnet_power_photovoltaics', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power photovoltaics', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_photovoltaics', + 'unique_id': 'solar_net_123.4567890-power_flow-power_photovoltaics', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_power_photovoltaics-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power photovoltaics', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_photovoltaics', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '62.9481', + }) +# --- +# name: test_gen24[sensor.solarnet_relative_autonomy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_relative_autonomy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relative autonomy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relative_autonomy', + 'unique_id': 'solar_net_123.4567890-power_flow-relative_autonomy', + 'unit_of_measurement': '%', + }) +# --- +# name: test_gen24[sensor.solarnet_relative_autonomy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Relative autonomy', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 
'entity_id': 'sensor.solarnet_relative_autonomy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.3592', + }) +# --- +# name: test_gen24[sensor.solarnet_relative_self_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_relative_self_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relative self consumption', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relative_self_consumption', + 'unique_id': 'solar_net_123.4567890-power_flow-relative_self_consumption', + 'unit_of_measurement': '%', + }) +# --- +# name: test_gen24[sensor.solarnet_relative_self_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Relative self consumption', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarnet_relative_self_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100.0', + }) +# --- +# name: test_gen24[sensor.solarnet_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 
'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': 'solar_net_123.4567890-power_flow-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24[sensor.solarnet_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'SolarNet Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1530193.42', + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_dc_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.byd_battery_box_premium_hv_dc_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc', + 'unique_id': 'P030T020Z2001234567 -current_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_dc_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'BYD Battery-Box Premium HV DC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_dc_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: 
test_gen24_storage[sensor.byd_battery_box_premium_hv_dc_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.byd_battery_box_premium_hv_dc_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc', + 'unique_id': 'P030T020Z2001234567 -voltage_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_dc_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'BYD Battery-Box Premium HV DC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_dc_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_designed_capacity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_designed_capacity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Designed capacity', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'capacity_designed', + 'unique_id': 'P030T020Z2001234567 -capacity_designed', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_designed_capacity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'BYD Battery-Box Premium HV Designed capacity', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_designed_capacity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16588', + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_maximum_capacity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_maximum_capacity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Maximum capacity', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'capacity_maximum', + 'unique_id': 'P030T020Z2001234567 -capacity_maximum', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_maximum_capacity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'BYD Battery-Box Premium HV Maximum capacity', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_maximum_capacity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16588', + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_state_of_charge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.byd_battery_box_premium_hv_state_of_charge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State of charge', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state_of_charge', + 'unique_id': 'P030T020Z2001234567 -state_of_charge', + 'unit_of_measurement': '%', + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_state_of_charge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'BYD Battery-Box Premium HV State of charge', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_state_of_charge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.6', + }) +# --- +# name: test_gen24_storage[sensor.byd_battery_box_premium_hv_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.byd_battery_box_premium_hv_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature_cell', + 'unique_id': 'P030T020Z2001234567 -temperature_cell', + 'unit_of_measurement': , + }) +# --- +# name: 
test_gen24_storage[sensor.byd_battery_box_premium_hv_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'BYD Battery-Box Premium HV Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.byd_battery_box_premium_hv_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '21.5', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_ac_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_ac_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac', + 'unique_id': '12345678-current_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_ac_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gen24 Storage AC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_ac_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.1087', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_ac_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.gen24_storage_ac_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_ac', + 'unique_id': '12345678-power_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_ac_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Gen24 Storage AC power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_ac_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '250.9093', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_ac_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_ac_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac', + 'unique_id': '12345678-voltage_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_ac_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Gen24 Storage AC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_ac_voltage', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '227.354', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_dc_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc', + 'unique_id': '12345678-current_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gen24 Storage DC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_dc_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3952', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_current_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_dc_current_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc_2', + 
'unique_id': '12345678-current_dc_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_current_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Gen24 Storage DC current 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_dc_current_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3564', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_dc_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc', + 'unique_id': '12345678-voltage_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Gen24 Storage DC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_dc_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '419.1009', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_voltage_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.gen24_storage_dc_voltage_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc_2', + 'unique_id': '12345678-voltage_dc_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_dc_voltage_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Gen24 Storage DC voltage 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_dc_voltage_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '318.8103', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_error_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.gen24_storage_error_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Error code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_code', + 'unique_id': '12345678-error_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_error_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gen24 Storage Error code', + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_error_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'0', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_frequency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_frequency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frequency_ac', + 'unique_id': '12345678-frequency_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_frequency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Gen24 Storage Frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_frequency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '49.9816', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_inverter_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.gen24_storage_inverter_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Inverter state', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'inverter_state', + 'unique_id': '12345678-inverter_state', + 
'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_inverter_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gen24 Storage Inverter state', + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_inverter_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Running', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_status_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.gen24_storage_status_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_code', + 'unique_id': '12345678-status_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_status_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gen24 Storage Status code', + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_status_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_status_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + 'unknown', + 'invalid', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 
'sensor.gen24_storage_status_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_message', + 'unique_id': '12345678-status_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_status_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gen24 Storage Status message', + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + 'unknown', + 'invalid', + ]), + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_status_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'running', + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gen24_storage_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': '12345678-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Gen24 Storage Total 
energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7512794.0117', + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_energy_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohmpilot_energy_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_ac_consumed', + 'unique_id': '23456789-energy_real_ac_consumed', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_energy_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Ohmpilot Energy consumed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohmpilot_energy_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1233295.0', + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohmpilot_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 
'original_icon': None, + 'original_name': 'Power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_ac', + 'unique_id': '23456789-power_real_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Ohmpilot Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohmpilot_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_state_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.ohmpilot_state_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'State code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state_code', + 'unique_id': '23456789-state_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_state_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Ohmpilot State code', + }), + 'context': , + 'entity_id': 'sensor.ohmpilot_state_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_state_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'up_and_running', + 'keep_minimum_temperature', + 'legionella_protection', + 'critical_fault', + 'fault', + 
'boost_mode', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.ohmpilot_state_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state_message', + 'unique_id': '23456789-state_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_state_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Ohmpilot State message', + 'options': list([ + 'up_and_running', + 'keep_minimum_temperature', + 'legionella_protection', + 'critical_fault', + 'fault', + 'boost_mode', + ]), + }), + 'context': , + 'entity_id': 'sensor.ohmpilot_state_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'up_and_running', + }) +# --- +# name: test_gen24_storage[sensor.ohmpilot_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ohmpilot_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature_channel_1', + 'unique_id': '23456789-temperature_channel_1', + 'unit_of_measurement': , + }) +# --- +# name: 
test_gen24_storage[sensor.ohmpilot_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Ohmpilot Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ohmpilot_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '38.9', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent', + 'unique_id': '1234567890-power_apparent', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '821.9', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent_phase_1', + 'unique_id': '1234567890-power_apparent_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '319.5', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent_phase_2', + 'unique_id': '1234567890-power_apparent_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power_phase_2-state] + StateSnapshot({ 
+ 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '383.9', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_apparent_phase_3', + 'unique_id': '1234567890-power_apparent_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_apparent_power_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Smart Meter TS 65A-3 Apparent power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_apparent_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '118.4', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_current_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac_phase_1', + 'unique_id': '1234567890-current_ac_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_current_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Smart Meter TS 65A-3 Current phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.701', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_current_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac_phase_2', + 'unique_id': '1234567890-current_ac_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_current_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 
'friendly_name': 'Smart Meter TS 65A-3 Current phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.832', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_current_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac_phase_3', + 'unique_id': '1234567890-current_ac_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_current_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Smart Meter TS 65A-3 Current phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_current_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.645', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_frequency_phase_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_frequency_phase_average', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency phase average', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frequency_phase_average', + 'unique_id': '1234567890-frequency_phase_average', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_frequency_phase_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Smart Meter TS 65A-3 Frequency phase average', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_frequency_phase_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '49.9', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_meter_location-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter location', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_location', + 'unique_id': '1234567890-meter_location', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_meter_location-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smart Meter TS 65A-3 Meter location', + }), + 'context': , + 'entity_id': 
'sensor.smart_meter_ts_65a_3_meter_location', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_meter_location_description-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'feed_in', + 'consumption_path', + 'external_generator', + 'external_battery', + 'subload', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location_description', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Meter location description', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_location_description', + 'unique_id': '1234567890-meter_location_description', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_meter_location_description-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Smart Meter TS 65A-3 Meter location description', + 'options': list([ + 'feed_in', + 'consumption_path', + 'external_generator', + 'external_battery', + 'subload', + ]), + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_meter_location_description', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'feed_in', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor', + 'unique_id': '1234567890-power_factor', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.698', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor_phase_1', + 'unique_id': '1234567890-power_factor_phase_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 
65A-3 Power factor phase 1', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.995', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor_phase_2', + 'unique_id': '1234567890-power_factor_phase_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor phase 2', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.389', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power factor phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_factor_phase_3', + 'unique_id': '1234567890-power_factor_phase_3', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_power_factor_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Smart Meter TS 65A-3 Power factor phase 3', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_power_factor_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.163', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_energy_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reactive energy consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_reactive_ac_consumed', + 'unique_id': '1234567890-energy_reactive_ac_consumed', + 'unit_of_measurement': 'varh', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_energy_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smart Meter TS 65A-3 Reactive energy consumed', + 'state_class': , + 'unit_of_measurement': 'varh', + 
}), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5482.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_energy_produced-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_produced', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reactive energy produced', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_reactive_ac_produced', + 'unique_id': '1234567890-energy_reactive_ac_produced', + 'unit_of_measurement': 'varh', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_energy_produced-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smart Meter TS 65A-3 Reactive energy produced', + 'state_class': , + 'unit_of_measurement': 'varh', + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_energy_produced', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3266105.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 
'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive', + 'unique_id': '1234567890-power_reactive', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-501.5', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive_phase_1', + 'unique_id': '1234567890-power_reactive_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 
'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-31.3', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive_phase_2', + 'unique_id': '1234567890-power_reactive_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-353.4', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reactive power phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_reactive_phase_3', + 'unique_id': '1234567890-power_reactive_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_reactive_power_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'reactive_power', + 'friendly_name': 'Smart Meter TS 65A-3 Reactive power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_reactive_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-116.7', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_consumed', + 'unique_id': '1234567890-energy_real_consumed', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy consumed', + 'state_class': , + 'unit_of_measurement': , + }), + 
'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1247204.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_minus-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_minus', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy minus', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_ac_minus', + 'unique_id': '1234567890-energy_real_ac_minus', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_minus-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy minus', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_minus', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1705128.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_plus-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_plus', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, 
+ 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy plus', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_ac_plus', + 'unique_id': '1234567890-energy_real_ac_plus', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_plus-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy plus', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_plus', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1247204.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_produced-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_energy_produced', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real energy produced', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_real_produced', + 'unique_id': '1234567890-energy_real_produced', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_energy_produced-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Smart Meter TS 65A-3 Real energy produced', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.smart_meter_ts_65a_3_real_energy_produced', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1705128.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real', + 'unique_id': '1234567890-power_real', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '487.7', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 
'original_name': 'Real power phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_phase_1', + 'unique_id': '1234567890-power_real_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '317.9', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_phase_2', + 'unique_id': '1234567890-power_real_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '150.0', + }) +# --- +# 
name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real_phase_3', + 'unique_id': '1234567890-power_real_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_real_power_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Smart Meter TS 65A-3 Real power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_real_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '19.6', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 1', + 'platform': 'fronius', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'voltage_ac_phase_1', + 'unique_id': '1234567890-voltage_ac_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '229.4', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_1_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 1-2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_to_phase_12', + 'unique_id': '1234567890-voltage_ac_phase_to_phase_12', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_1_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 1-2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_1_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '396.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_2-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 2', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_2', + 'unique_id': '1234567890-voltage_ac_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '225.6', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_2_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 2-3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_to_phase_23', + 'unique_id': 
'1234567890-voltage_ac_phase_to_phase_23', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_2_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 2-3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_2_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '393.0', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 3', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_3', + 'unique_id': '1234567890-voltage_ac_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '228.3', + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_3_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 3-1', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac_phase_to_phase_31', + 'unique_id': '1234567890-voltage_ac_phase_to_phase_31', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.smart_meter_ts_65a_3_voltage_phase_3_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Smart Meter TS 65A-3 Voltage phase 3-1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.smart_meter_ts_65a_3_voltage_phase_3_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '394.3', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_meter_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.solarnet_meter_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter mode', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_mode', + 'unique_id': 'solar_net_12345678-power_flow-meter_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.solarnet_meter_mode-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Meter mode', + }), + 'context': , + 'entity_id': 'sensor.solarnet_meter_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'bidirectional', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power battery', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_battery', + 'unique_id': 'solar_net_12345678-power_flow-power_battery', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power battery', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.1591', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_battery_charge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_battery_charge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ 
+ }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power battery charge', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_battery_charge', + 'unique_id': 'solar_net_12345678-power_flow-power_battery_charge', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_battery_charge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power battery charge', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_battery_charge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_battery_discharge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_battery_discharge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power battery discharge', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_battery_discharge', + 'unique_id': 'solar_net_12345678-power_flow-power_battery_discharge', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_battery_discharge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power battery discharge', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_battery_discharge', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '0.1591', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid', + 'unique_id': 'solar_net_12345678-power_flow-power_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2274.9', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_grid_export-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid_export', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid export', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid_export', + 
'unique_id': 'solar_net_12345678-power_flow-power_grid_export', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_grid_export-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid export', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid_export', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_grid_import-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid_import', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid import', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid_import', + 'unique_id': 'solar_net_12345678-power_flow-power_grid_import', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_grid_import-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid import', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid_import', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2274.9', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_load-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load', + 'unique_id': 'solar_net_12345678-power_flow-power_load', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_load-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-2459.3092', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_load_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load_consumed', + 'unique_id': 'solar_net_12345678-power_flow-power_load_consumed', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_load_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet 
Power load consumed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2459.3092', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_load_generated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load_generated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load generated', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load_generated', + 'unique_id': 'solar_net_12345678-power_flow-power_load_generated', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_load_generated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load generated', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load_generated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_photovoltaics-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_photovoltaics', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 
'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power photovoltaics', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_photovoltaics', + 'unique_id': 'solar_net_12345678-power_flow-power_photovoltaics', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_power_photovoltaics-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power photovoltaics', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_photovoltaics', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '216.4328', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_relative_autonomy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_relative_autonomy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relative autonomy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relative_autonomy', + 'unique_id': 'solar_net_12345678-power_flow-relative_autonomy', + 'unit_of_measurement': '%', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_relative_autonomy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Relative autonomy', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarnet_relative_autonomy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'7.4984', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_relative_self_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_relative_self_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relative self consumption', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relative_self_consumption', + 'unique_id': 'solar_net_12345678-power_flow-relative_self_consumption', + 'unit_of_measurement': '%', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_relative_self_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Relative self consumption', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarnet_relative_self_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100.0', + }) +# --- +# name: test_gen24_storage[sensor.solarnet_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': 'solar_net_12345678-power_flow-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_gen24_storage[sensor.solarnet_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'SolarNet Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7512664.4042', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_ac_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_ac_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac', + 'unique_id': '234567-current_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_ac_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Primo 3.0-1 AC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_ac_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.32', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_ac_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_ac_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_ac', + 'unique_id': '234567-power_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_ac_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Primo 3.0-1 AC power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_ac_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '296', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_ac_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_ac_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_ac', + 'unique_id': '234567-voltage_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_ac_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Primo 3.0-1 AC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_ac_voltage', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '223.6', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_dc_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_dc_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc', + 'unique_id': '234567-current_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_dc_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Primo 3.0-1 DC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_dc_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.97', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_dc_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_dc_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc', + 'unique_id': 
'234567-voltage_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_dc_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Primo 3.0-1 DC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_dc_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '329.5', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_energy_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_energy_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy day', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_day', + 'unique_id': '234567-energy_day', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_energy_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Primo 3.0-1 Energy day', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_energy_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14237', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_energy_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.primo_3_0_1_energy_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy year', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_year', + 'unique_id': '234567-energy_year', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_energy_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Primo 3.0-1 Energy year', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_energy_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3596193.25', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_error_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_3_0_1_error_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Error code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_code', + 'unique_id': '234567-error_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_error_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 3.0-1 Error code', + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_error_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_frequency-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_frequency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frequency_ac', + 'unique_id': '234567-frequency_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_frequency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Primo 3.0-1 Frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_frequency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60.01', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_led_color-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_3_0_1_led_color', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED color', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'led_color', + 'unique_id': '234567-led_color', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_led_color-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'friendly_name': 'Primo 3.0-1 LED color', + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_led_color', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_led_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_3_0_1_led_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED state', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'led_state', + 'unique_id': '234567-led_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_led_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 3.0-1 LED state', + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_led_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_status_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_3_0_1_status_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_code', + 'unique_id': '234567-status_code', 
+ 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_status_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 3.0-1 Status code', + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_status_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_status_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + 'unknown', + 'invalid', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_3_0_1_status_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_message', + 'unique_id': '234567-status_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_status_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Primo 3.0-1 Status message', + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + 'unknown', + 'invalid', + ]), + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_status_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'running', + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), 
+ 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_3_0_1_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': '234567-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Primo 3.0-1 Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5796010', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_ac_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_ac_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_ac', + 'unique_id': '123456-current_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_ac_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Primo 5.0-1 AC current', + 'state_class': , 
+ 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_ac_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.85', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_ac_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_ac_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_ac', + 'unique_id': '123456-power_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_ac_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Primo 5.0-1 AC power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_ac_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '862', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_ac_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_ac_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, 
+ 'supported_features': 0, + 'translation_key': 'voltage_ac', + 'unique_id': '123456-voltage_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_ac_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Primo 5.0-1 AC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_ac_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '223.9', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_dc_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_dc_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC current', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_dc', + 'unique_id': '123456-current_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_dc_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Primo 5.0-1 DC current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_dc_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.23', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_dc_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_dc_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC voltage', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_dc', + 'unique_id': '123456-voltage_dc', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_dc_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Primo 5.0-1 DC voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_dc_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '452.3', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_energy_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_energy_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy day', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_day', + 'unique_id': '123456-energy_day', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_energy_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Primo 5.0-1 Energy day', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_energy_day', + 'last_changed': 
, + 'last_reported': , + 'last_updated': , + 'state': '22504', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_energy_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_energy_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy year', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_year', + 'unique_id': '123456-energy_year', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_energy_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Primo 5.0-1 Energy year', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_energy_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7532755.5', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_error_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_5_0_1_error_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Error code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_code', + 'unique_id': '123456-error_code', + 
'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_error_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 5.0-1 Error code', + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_error_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_frequency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_frequency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frequency_ac', + 'unique_id': '123456-frequency_ac', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_frequency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Primo 5.0-1 Frequency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_frequency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_led_color-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_5_0_1_led_color', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED color', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'led_color', + 'unique_id': '123456-led_color', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_led_color-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 5.0-1 LED color', + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_led_color', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_led_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_5_0_1_led_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LED state', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'led_state', + 'unique_id': '123456-led_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_led_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 5.0-1 LED state', + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_led_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_status_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 
'sensor.primo_5_0_1_status_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status code', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_code', + 'unique_id': '123456-status_code', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_status_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Primo 5.0-1 Status code', + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_status_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_status_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 'idle', + 'ready', + 'sleeping', + 'unknown', + 'invalid', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_5_0_1_status_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status_message', + 'unique_id': '123456-status_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_status_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Primo 5.0-1 Status message', + 'options': list([ + 'startup', + 'running', + 'standby', + 'bootloading', + 'error', + 
'idle', + 'ready', + 'sleeping', + 'unknown', + 'invalid', + ]), + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_status_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'running', + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.primo_5_0_1_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': '123456-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.primo_5_0_1_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Primo 5.0-1 Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17114940', + }) +# --- +# name: test_primo_s0[sensor.s0_meter_at_inverter_1_meter_location-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.s0_meter_at_inverter_1_meter_location', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, 
+ 'original_name': 'Meter location', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_location', + 'unique_id': 'solar_net_123.4567890:S0 Meter at inverter 1-meter_location', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.s0_meter_at_inverter_1_meter_location-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'S0 Meter at inverter 1 Meter location', + }), + 'context': , + 'entity_id': 'sensor.s0_meter_at_inverter_1_meter_location', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_primo_s0[sensor.s0_meter_at_inverter_1_meter_location_description-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'feed_in', + 'consumption_path', + 'external_generator', + 'external_battery', + 'subload', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.s0_meter_at_inverter_1_meter_location_description', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Meter location description', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_location_description', + 'unique_id': 'solar_net_123.4567890:S0 Meter at inverter 1-meter_location_description', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.s0_meter_at_inverter_1_meter_location_description-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'S0 Meter at inverter 1 Meter location description', + 'options': list([ + 'feed_in', + 'consumption_path', + 'external_generator', + 
'external_battery', + 'subload', + ]), + }), + 'context': , + 'entity_id': 'sensor.s0_meter_at_inverter_1_meter_location_description', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'consumption_path', + }) +# --- +# name: test_primo_s0[sensor.s0_meter_at_inverter_1_real_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.s0_meter_at_inverter_1_real_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Real power', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_real', + 'unique_id': 'solar_net_123.4567890:S0 Meter at inverter 1-power_real', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.s0_meter_at_inverter_1_real_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'S0 Meter at inverter 1 Real power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.s0_meter_at_inverter_1_real_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-2216.7487', + }) +# --- +# name: test_primo_s0[sensor.solarnet_co2_factor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_co2_factor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CO₂ factor', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'co2_factor', + 'unique_id': '123.4567890-co2_factor', + 'unit_of_measurement': 'kg/kWh', + }) +# --- +# name: test_primo_s0[sensor.solarnet_co2_factor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet CO₂ factor', + 'state_class': , + 'unit_of_measurement': 'kg/kWh', + }), + 'context': , + 'entity_id': 'sensor.solarnet_co2_factor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.53', + }) +# --- +# name: test_primo_s0[sensor.solarnet_energy_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_energy_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy day', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_day', + 'unique_id': 'solar_net_123.4567890-power_flow-energy_day', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_energy_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'SolarNet Energy day', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_energy_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '36724', + }) +# --- +# name: test_primo_s0[sensor.solarnet_energy_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': 
None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_energy_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy year', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_year', + 'unique_id': 'solar_net_123.4567890-power_flow-energy_year', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_energy_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'SolarNet Energy year', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_energy_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11128933.25', + }) +# --- +# name: test_primo_s0[sensor.solarnet_grid_export_tariff-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_grid_export_tariff', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid export tariff', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cash_factor', + 'unique_id': '123.4567890-cash_factor', + 'unit_of_measurement': 'BRL/kWh', + }) +# --- +# name: test_primo_s0[sensor.solarnet_grid_export_tariff-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Grid export tariff', + 'state_class': , + 'unit_of_measurement': 'BRL/kWh', + }), + 'context': , + 'entity_id': 'sensor.solarnet_grid_export_tariff', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_primo_s0[sensor.solarnet_grid_import_tariff-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_grid_import_tariff', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid import tariff', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'delivery_factor', + 'unique_id': '123.4567890-delivery_factor', + 'unit_of_measurement': 'BRL/kWh', + }) +# --- +# name: test_primo_s0[sensor.solarnet_grid_import_tariff-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Grid import tariff', + 'state_class': , + 'unit_of_measurement': 'BRL/kWh', + }), + 'context': , + 'entity_id': 'sensor.solarnet_grid_import_tariff', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_primo_s0[sensor.solarnet_meter_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.solarnet_meter_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Meter mode', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_mode', + 'unique_id': 'solar_net_123.4567890-power_flow-meter_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.solarnet_meter_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Meter mode', + }), + 'context': , + 'entity_id': 'sensor.solarnet_meter_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'vague-meter', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid', + 'unique_id': 'solar_net_123.4567890-power_flow-power_grid', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '384.9349', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_grid_export-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , 
+ }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid_export', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid export', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid_export', + 'unique_id': 'solar_net_123.4567890-power_flow-power_grid_export', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_grid_export-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid export', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid_export', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_grid_import-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_grid_import', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power grid import', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_grid_import', + 'unique_id': 'solar_net_123.4567890-power_flow-power_grid_import', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_grid_import-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power grid import', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_grid_import', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '384.9349', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_load-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load', + 'unique_id': 'solar_net_123.4567890-power_flow-power_load', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_load-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-2218.9349', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_load_consumed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load_consumed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load consumed', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load_consumed', + 'unique_id': 'solar_net_123.4567890-power_flow-power_load_consumed', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_load_consumed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load consumed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load_consumed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2218.9349', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_load_generated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_load_generated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power load generated', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_load_generated', + 'unique_id': 'solar_net_123.4567890-power_flow-power_load_generated', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_load_generated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power load generated', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_load_generated', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '0', + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_photovoltaics-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_power_photovoltaics', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power photovoltaics', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_photovoltaics', + 'unique_id': 'solar_net_123.4567890-power_flow-power_photovoltaics', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_power_photovoltaics-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'SolarNet Power photovoltaics', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_power_photovoltaics', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1834', + }) +# --- +# name: test_primo_s0[sensor.solarnet_relative_autonomy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_relative_autonomy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relative autonomy', + 'platform': 'fronius', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'relative_autonomy', + 'unique_id': 'solar_net_123.4567890-power_flow-relative_autonomy', + 'unit_of_measurement': '%', + }) +# --- +# name: test_primo_s0[sensor.solarnet_relative_autonomy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Relative autonomy', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarnet_relative_autonomy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '82.6523', + }) +# --- +# name: test_primo_s0[sensor.solarnet_relative_self_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_relative_self_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Relative self consumption', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relative_self_consumption', + 'unique_id': 'solar_net_123.4567890-power_flow-relative_self_consumption', + 'unit_of_measurement': '%', + }) +# --- +# name: test_primo_s0[sensor.solarnet_relative_self_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SolarNet Relative self consumption', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.solarnet_relative_self_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_primo_s0[sensor.solarnet_total_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': 
dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solarnet_total_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_total', + 'unique_id': 'solar_net_123.4567890-power_flow-energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_primo_s0[sensor.solarnet_total_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'SolarNet Total energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solarnet_total_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22910919.5', + }) +# --- diff --git a/tests/components/fronius/test_config_flow.py b/tests/components/fronius/test_config_flow.py index ed90e266b81..933b8fad8ef 100644 --- a/tests/components/fronius/test_config_flow.py +++ b/tests/components/fronius/test_config_flow.py @@ -44,43 +44,62 @@ MOCK_DHCP_DATA = DhcpServiceInfo( ) -async def test_form_with_logger(hass: HomeAssistant) -> None: - """Test we get the form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert not result["errors"] - - with ( - patch( - "pyfronius.Fronius.current_logger_info", - return_value=LOGGER_INFO_RETURN_VALUE, - ), - patch( - "homeassistant.components.fronius.async_setup_entry", - return_value=True, - ) as mock_setup_entry, +async def assert_finish_flow_with_logger(hass: HomeAssistant, flow_id: str) -> None: + """Assert finishing the flow with a logger 
device.""" + with patch( + "pyfronius.Fronius.current_logger_info", + return_value=LOGGER_INFO_RETURN_VALUE, ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], + result = await hass.config_entries.flow.async_configure( + flow_id, { "host": "10.9.8.1", }, ) await hass.async_block_till_done() - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "SolarNet Datalogger at 10.9.8.1" - assert result2["data"] == { + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "SolarNet Datalogger at 10.9.8.1" + assert result["data"] == { "host": "10.9.8.1", "is_logger": True, } - assert len(mock_setup_entry.mock_calls) == 1 + assert result["result"].unique_id == "123.4567" + + +async def assert_abort_flow_with_logger( + hass: HomeAssistant, flow_id: str, reason: str +) -> config_entries.ConfigFlowResult: + """Assert the flow was aborted when a logger device responded.""" + with patch( + "pyfronius.Fronius.current_logger_info", + return_value=LOGGER_INFO_RETURN_VALUE, + ): + result = await hass.config_entries.flow.async_configure( + flow_id, + { + "host": "10.9.8.1", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == reason + return result + + +async def test_form_with_logger(hass: HomeAssistant) -> None: + """Test the basic flow with a logger device.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + await assert_finish_flow_with_logger(hass, result["flow_id"]) async def test_form_with_inverter(hass: HomeAssistant) -> None: - """Test we get the form.""" + """Test the basic flow with a Gen24 device.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -96,10 +115,6 @@ async def test_form_with_inverter(hass: 
HomeAssistant) -> None: "pyfronius.Fronius.inverter_info", return_value=INVERTER_INFO_RETURN_VALUE, ), - patch( - "homeassistant.components.fronius.async_setup_entry", - return_value=True, - ) as mock_setup_entry, ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -115,7 +130,7 @@ async def test_form_with_inverter(hass: HomeAssistant) -> None: "host": "10.9.1.1", "is_logger": False, } - assert len(mock_setup_entry.mock_calls) == 1 + assert result2["result"].unique_id == "1234567" @pytest.mark.parametrize( @@ -154,6 +169,7 @@ async def test_form_cannot_connect( assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "cannot_connect"} + await assert_finish_flow_with_logger(hass, result2["flow_id"]) async def test_form_unexpected(hass: HomeAssistant) -> None: @@ -175,13 +191,14 @@ async def test_form_unexpected(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "unknown"} + await assert_finish_flow_with_logger(hass, result2["flow_id"]) async def test_form_already_existing(hass: HomeAssistant) -> None: """Test existing entry.""" MockConfigEntry( domain=DOMAIN, - unique_id="123.4567", + unique_id=LOGGER_INFO_RETURN_VALUE["unique_identifier"]["value"], data={CONF_HOST: "10.9.8.1", "is_logger": True}, ).add_to_hass(hass) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 @@ -189,20 +206,9 @@ async def test_form_already_existing(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with patch( - "pyfronius.Fronius.current_logger_info", - return_value=LOGGER_INFO_RETURN_VALUE, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": "10.9.8.1", - }, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "already_configured" + await 
assert_abort_flow_with_logger( + hass, result["flow_id"], reason="already_configured" + ) async def test_config_flow_already_configured( @@ -273,6 +279,7 @@ async def test_dhcp(hass: HomeAssistant, aioclient_mock: AiohttpClientMocker) -> "host": MOCK_DHCP_DATA.ip, "is_logger": True, } + assert result["result"].unique_id == "123.4567" async def test_dhcp_already_configured( @@ -345,10 +352,6 @@ async def test_reconfigure(hass: HomeAssistant) -> None: "pyfronius.Fronius.inverter_info", return_value=INVERTER_INFO_RETURN_VALUE, ), - patch( - "homeassistant.components.fronius.async_setup_entry", - return_value=True, - ) as mock_setup_entry, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -364,14 +367,13 @@ async def test_reconfigure(hass: HomeAssistant) -> None: "host": new_host, "is_logger": False, } - assert len(mock_setup_entry.mock_calls) == 1 async def test_reconfigure_cannot_connect(hass: HomeAssistant) -> None: """Test we handle cannot connect error.""" entry = MockConfigEntry( domain=DOMAIN, - unique_id="123.4567890", + unique_id=LOGGER_INFO_RETURN_VALUE["unique_identifier"]["value"], data={ CONF_HOST: "10.1.2.3", "is_logger": True, @@ -401,12 +403,16 @@ async def test_reconfigure_cannot_connect(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "cannot_connect"} + await assert_abort_flow_with_logger( + hass, result2["flow_id"], reason="reconfigure_successful" + ) + async def test_reconfigure_unexpected(hass: HomeAssistant) -> None: """Test we handle unexpected error.""" entry = MockConfigEntry( domain=DOMAIN, - unique_id="123.4567890", + unique_id=LOGGER_INFO_RETURN_VALUE["unique_identifier"]["value"], data={ CONF_HOST: "10.1.2.3", "is_logger": True, @@ -430,12 +436,16 @@ async def test_reconfigure_unexpected(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "unknown"} + await 
assert_abort_flow_with_logger( + hass, result2["flow_id"], reason="reconfigure_successful" + ) -async def test_reconfigure_already_configured(hass: HomeAssistant) -> None: - """Test reconfiguring an entry.""" + +async def test_reconfigure_to_different_device(hass: HomeAssistant) -> None: + """Test reconfiguring an entry to a different device.""" entry = MockConfigEntry( domain=DOMAIN, - unique_id="123.4567890", + unique_id="999.9999999", data={ CONF_HOST: "10.1.2.3", "is_logger": True, @@ -447,68 +457,6 @@ async def test_reconfigure_already_configured(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reconfigure" - with ( - patch( - "pyfronius.Fronius.current_logger_info", - return_value=LOGGER_INFO_RETURN_VALUE, - ), - patch( - "pyfronius.Fronius.inverter_info", - return_value=INVERTER_INFO_RETURN_VALUE, - ), - patch( - "homeassistant.components.fronius.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - "host": "10.1.2.3", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "unique_id_mismatch" - assert len(mock_setup_entry.mock_calls) == 0 - - -async def test_reconfigure_already_existing(hass: HomeAssistant) -> None: - """Test reconfiguring entry to already existing device.""" - entry = MockConfigEntry( - domain=DOMAIN, - unique_id="123.4567890", - data={ - CONF_HOST: "10.1.2.3", - "is_logger": True, - }, + await assert_abort_flow_with_logger( + hass, result["flow_id"], reason="unique_id_mismatch" ) - entry.add_to_hass(hass) - - entry_2_uid = "222.2222222" - entry_2 = MockConfigEntry( - domain=DOMAIN, - unique_id=entry_2_uid, - data={ - CONF_HOST: "10.2.2.2", - "is_logger": True, - }, - ) - entry_2.add_to_hass(hass) - - result = await entry.start_reconfigure_flow(hass) - with patch( - 
"pyfronius.Fronius.current_logger_info", - return_value={"unique_identifier": {"value": entry_2_uid}}, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": "10.1.1.1", - }, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "unique_id_mismatch" diff --git a/tests/components/fronius/test_coordinator.py b/tests/components/fronius/test_coordinator.py index 13a08bbe70e..fab2d509767 100644 --- a/tests/components/fronius/test_coordinator.py +++ b/tests/components/fronius/test_coordinator.py @@ -29,7 +29,7 @@ async def test_adaptive_update_interval( mock_inverter_data.reset_mock() freezer.tick(FroniusInverterUpdateCoordinator.default_interval) - async_fire_time_changed(hass, None) + async_fire_time_changed(hass) await hass.async_block_till_done() mock_inverter_data.assert_called_once() mock_inverter_data.reset_mock() @@ -38,13 +38,13 @@ async def test_adaptive_update_interval( # first 3 bad requests at default interval - 4th has different interval for _ in range(3): freezer.tick(FroniusInverterUpdateCoordinator.default_interval) - async_fire_time_changed(hass, None) + async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_inverter_data.call_count == 3 mock_inverter_data.reset_mock() freezer.tick(FroniusInverterUpdateCoordinator.error_interval) - async_fire_time_changed(hass, None) + async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_inverter_data.call_count == 1 mock_inverter_data.reset_mock() @@ -52,13 +52,13 @@ async def test_adaptive_update_interval( mock_inverter_data.side_effect = None # next successful request resets to default interval freezer.tick(FroniusInverterUpdateCoordinator.error_interval) - async_fire_time_changed(hass, None) + async_fire_time_changed(hass) await hass.async_block_till_done() mock_inverter_data.assert_called_once() mock_inverter_data.reset_mock() 
freezer.tick(FroniusInverterUpdateCoordinator.default_interval) - async_fire_time_changed(hass, None) + async_fire_time_changed(hass) await hass.async_block_till_done() mock_inverter_data.assert_called_once() mock_inverter_data.reset_mock() @@ -68,7 +68,7 @@ async def test_adaptive_update_interval( # first 3 requests at default interval - 4th has different interval for _ in range(3): freezer.tick(FroniusInverterUpdateCoordinator.default_interval) - async_fire_time_changed(hass, None) + async_fire_time_changed(hass) await hass.async_block_till_done() # BadStatusError does 3 silent retries for inverter endpoint * 3 request intervals = 9 assert mock_inverter_data.call_count == 9 diff --git a/tests/components/fronius/test_init.py b/tests/components/fronius/test_init.py index 9d570785073..a950ed4e296 100644 --- a/tests/components/fronius/test_init.py +++ b/tests/components/fronius/test_init.py @@ -3,6 +3,7 @@ from datetime import timedelta from unittest.mock import patch +from freezegun.api import FrozenDateTimeFactory from pyfronius import FroniusError from homeassistant.components.fronius.const import DOMAIN, SOLAR_NET_RESCAN_TIMER @@ -10,7 +11,6 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component -from homeassistant.util import dt as dt_util from . 
import mock_responses, setup_fronius_integration @@ -66,6 +66,7 @@ async def test_inverter_night_rescan( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, + freezer: FrozenDateTimeFactory, ) -> None: """Test dynamic adding of an inverter discovered automatically after a Home Assistant reboot during the night.""" mock_responses(aioclient_mock, fixture_set="igplus_v2", night=True) @@ -78,9 +79,8 @@ async def test_inverter_night_rescan( # Switch to daytime mock_responses(aioclient_mock, fixture_set="igplus_v2", night=False) - async_fire_time_changed( - hass, dt_util.utcnow() + timedelta(minutes=SOLAR_NET_RESCAN_TIMER) - ) + freezer.tick(timedelta(minutes=SOLAR_NET_RESCAN_TIMER)) + async_fire_time_changed(hass) await hass.async_block_till_done() # We expect our inverter to be present now @@ -88,9 +88,8 @@ async def test_inverter_night_rescan( assert inverter_1.manufacturer == "Fronius" # After another re-scan we still only expect this inverter - async_fire_time_changed( - hass, dt_util.utcnow() + timedelta(minutes=SOLAR_NET_RESCAN_TIMER * 2) - ) + freezer.tick(timedelta(minutes=SOLAR_NET_RESCAN_TIMER)) + async_fire_time_changed(hass) await hass.async_block_till_done() inverter_1 = device_registry.async_get_device(identifiers={(DOMAIN, "203200")}) assert inverter_1.manufacturer == "Fronius" @@ -100,6 +99,7 @@ async def test_inverter_rescan_interruption( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, + freezer: FrozenDateTimeFactory, ) -> None: """Test interruption of re-scan during runtime to process further.""" mock_responses(aioclient_mock, fixture_set="igplus_v2", night=True) @@ -115,9 +115,8 @@ async def test_inverter_rescan_interruption( "pyfronius.Fronius.inverter_info", side_effect=FroniusError, ): - async_fire_time_changed( - hass, dt_util.utcnow() + timedelta(minutes=SOLAR_NET_RESCAN_TIMER) - ) + freezer.tick(timedelta(minutes=SOLAR_NET_RESCAN_TIMER)) + 
async_fire_time_changed(hass) await hass.async_block_till_done() # No increase of devices expected because of a FroniusError @@ -132,9 +131,8 @@ async def test_inverter_rescan_interruption( # Next re-scan will pick up the new inverter. Expect 2 devices now. mock_responses(aioclient_mock, fixture_set="igplus_v2", night=False) - async_fire_time_changed( - hass, dt_util.utcnow() + timedelta(minutes=SOLAR_NET_RESCAN_TIMER * 2) - ) + freezer.tick(timedelta(minutes=SOLAR_NET_RESCAN_TIMER)) + async_fire_time_changed(hass) await hass.async_block_till_done() assert ( diff --git a/tests/components/fronius/test_sensor.py b/tests/components/fronius/test_sensor.py index 04c25ce26f2..b5d051d56ca 100644 --- a/tests/components/fronius/test_sensor.py +++ b/tests/components/fronius/test_sensor.py @@ -2,27 +2,29 @@ from freezegun.api import FrozenDateTimeFactory import pytest +from syrupy import SnapshotAssertion from homeassistant.components.fronius.const import DOMAIN from homeassistant.components.fronius.coordinator import ( FroniusInverterUpdateCoordinator, - FroniusMeterUpdateCoordinator, FroniusPowerFlowUpdateCoordinator, ) from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, entity_registry as er -from . import enable_all_entities, mock_responses, setup_fronius_integration +from . 
import mock_responses, setup_fronius_integration -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_symo_inverter( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, ) -> None: """Test Fronius Symo inverter entities.""" @@ -32,15 +34,8 @@ async def test_symo_inverter( # Init at night mock_responses(aioclient_mock, night=True) - config_entry = await setup_fronius_integration(hass) + await setup_fronius_integration(hass) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 22 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusInverterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 58 assert_state("sensor.symo_20_dc_current", 0) assert_state("sensor.symo_20_energy_day", 10828) @@ -54,13 +49,6 @@ async def test_symo_inverter( freezer.tick(FroniusInverterUpdateCoordinator.default_interval) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 62 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusInverterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 64 # 4 additional AC entities assert_state("sensor.symo_20_dc_current", 2.19) @@ -104,6 +92,7 @@ async def test_symo_logger( assert_state("sensor.solarnet_grid_import_tariff", 0.15) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_symo_meter( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -117,15 +106,8 @@ async def test_symo_meter( assert state.state == str(expected_state) mock_responses(aioclient_mock) - config_entry = await 
setup_fronius_integration(hass) + await setup_fronius_integration(hass) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 26 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusMeterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 64 # states are rounded to 4 decimals assert_state("sensor.smart_meter_63a_current_phase_1", 7.755) @@ -206,6 +188,7 @@ async def test_symo_meter_forged( ) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_symo_power_flow( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -220,15 +203,8 @@ async def test_symo_power_flow( # First test at night mock_responses(aioclient_mock, night=True) - config_entry = await setup_fronius_integration(hass) + await setup_fronius_integration(hass) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 22 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusInverterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 58 # states are rounded to 4 decimals assert_state("sensor.solarnet_energy_day", 10828) @@ -277,10 +253,13 @@ async def test_symo_power_flow( assert_state("sensor.solarnet_relative_self_consumption", 0) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_gen24( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, freezer: FrozenDateTimeFactory, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test Fronius Gen24 inverter entities.""" @@ -292,72 +271,10 @@ async def test_gen24( mock_responses(aioclient_mock, fixture_set="gen24") config_entry = await setup_fronius_integration(hass, is_logger=False) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 24 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusMeterUpdateCoordinator.default_interval, - ) assert 
len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 58 - # inverter 1 - assert_state("sensor.inverter_name_ac_current", 0.1589) - assert_state("sensor.inverter_name_dc_current_2", 0.0754) - assert_state("sensor.inverter_name_status_code", 7) - assert_state("sensor.inverter_name_status_message", "running") - assert_state("sensor.inverter_name_dc_current", 0.0783) - assert_state("sensor.inverter_name_dc_voltage_2", 403.4312) - assert_state("sensor.inverter_name_ac_power", 37.3204) - assert_state("sensor.inverter_name_error_code", 0) - assert_state("sensor.inverter_name_dc_voltage", 411.3811) - assert_state("sensor.inverter_name_total_energy", 1530193.42) - assert_state("sensor.inverter_name_inverter_state", "Running") - assert_state("sensor.inverter_name_ac_voltage", 234.9168) - assert_state("sensor.inverter_name_frequency", 49.9917) - # meter - assert_state("sensor.smart_meter_ts_65a_3_real_energy_produced", 3863340.0) - assert_state("sensor.smart_meter_ts_65a_3_real_energy_consumed", 2013105.0) - assert_state("sensor.smart_meter_ts_65a_3_real_power", 653.1) - assert_state("sensor.smart_meter_ts_65a_3_frequency_phase_average", 49.9) - assert_state("sensor.smart_meter_ts_65a_3_meter_location", 0) - assert_state("sensor.smart_meter_ts_65a_3_meter_location_description", "feed_in") - assert_state("sensor.smart_meter_ts_65a_3_power_factor", 0.828) - assert_state("sensor.smart_meter_ts_65a_3_reactive_energy_consumed", 88221.0) - assert_state("sensor.smart_meter_ts_65a_3_real_energy_minus", 3863340.0) - assert_state("sensor.smart_meter_ts_65a_3_current_phase_2", 2.33) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_1", 235.9) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_1_2", 408.7) - assert_state("sensor.smart_meter_ts_65a_3_real_power_phase_2", 294.9) - assert_state("sensor.smart_meter_ts_65a_3_real_energy_plus", 2013105.0) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_2", 236.1) - 
assert_state("sensor.smart_meter_ts_65a_3_reactive_energy_produced", 1989125.0) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_3", 236.9) - assert_state("sensor.smart_meter_ts_65a_3_power_factor_phase_1", 0.441) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_2_3", 409.6) - assert_state("sensor.smart_meter_ts_65a_3_current_phase_3", 1.825) - assert_state("sensor.smart_meter_ts_65a_3_power_factor_phase_3", 0.832) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power_phase_1", 243.3) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_3_1", 409.4) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power_phase_2", 323.4) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power_phase_3", 301.2) - assert_state("sensor.smart_meter_ts_65a_3_real_power_phase_1", 106.8) - assert_state("sensor.smart_meter_ts_65a_3_power_factor_phase_2", 0.934) - assert_state("sensor.smart_meter_ts_65a_3_real_power_phase_3", 251.3) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power_phase_1", -218.6) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power_phase_2", -132.8) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power_phase_3", -166.0) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power", 868.0) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power", -517.4) - assert_state("sensor.smart_meter_ts_65a_3_current_phase_1", 1.145) - # power_flow - assert_state("sensor.solarnet_power_grid", 658.4) - assert_state("sensor.solarnet_relative_self_consumption", 100.0) - assert_state("sensor.solarnet_power_photovoltaics", 62.9481) - assert_state("sensor.solarnet_power_load", -695.6827) - assert_state("sensor.solarnet_meter_mode", "meter") - assert_state("sensor.solarnet_relative_autonomy", 5.3592) - assert_state("sensor.solarnet_total_energy", 1530193.42) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + assert_state("sensor.inverter_name_total_energy", 1530193.42) # Gen24 devices may report 0 for 
total energy while doing firmware updates. # This should yield "unknown" state instead of 0. mock_responses( @@ -375,11 +292,14 @@ async def test_gen24( assert_state("sensor.inverter_name_total_energy", "unknown") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_gen24_storage( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, freezer: FrozenDateTimeFactory, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test Fronius Gen24 inverter with BYD battery and Ohmpilot entities.""" @@ -393,87 +313,8 @@ async def test_gen24_storage( hass, is_logger=False, unique_id="12345678" ) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 37 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusMeterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 72 - # inverter 1 - assert_state("sensor.gen24_storage_dc_current", 0.3952) - assert_state("sensor.gen24_storage_dc_voltage_2", 318.8103) - assert_state("sensor.gen24_storage_dc_current_2", 0.3564) - assert_state("sensor.gen24_storage_ac_current", 1.1087) - assert_state("sensor.gen24_storage_ac_power", 250.9093) - assert_state("sensor.gen24_storage_error_code", 0) - assert_state("sensor.gen24_storage_status_code", 7) - assert_state("sensor.gen24_storage_status_message", "running") - assert_state("sensor.gen24_storage_total_energy", 7512794.0117) - assert_state("sensor.gen24_storage_inverter_state", "Running") - assert_state("sensor.gen24_storage_dc_voltage", 419.1009) - assert_state("sensor.gen24_storage_ac_voltage", 227.354) - assert_state("sensor.gen24_storage_frequency", 49.9816) - # meter - assert_state("sensor.smart_meter_ts_65a_3_real_energy_produced", 1705128.0) - assert_state("sensor.smart_meter_ts_65a_3_real_power", 487.7) - assert_state("sensor.smart_meter_ts_65a_3_power_factor", 0.698) - 
assert_state("sensor.smart_meter_ts_65a_3_real_energy_consumed", 1247204.0) - assert_state("sensor.smart_meter_ts_65a_3_frequency_phase_average", 49.9) - assert_state("sensor.smart_meter_ts_65a_3_meter_location", 0) - assert_state("sensor.smart_meter_ts_65a_3_meter_location_description", "feed_in") - assert_state("sensor.smart_meter_ts_65a_3_reactive_power", -501.5) - assert_state("sensor.smart_meter_ts_65a_3_reactive_energy_produced", 3266105.0) - assert_state("sensor.smart_meter_ts_65a_3_real_power_phase_3", 19.6) - assert_state("sensor.smart_meter_ts_65a_3_current_phase_3", 0.645) - assert_state("sensor.smart_meter_ts_65a_3_real_energy_minus", 1705128.0) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power_phase_2", 383.9) - assert_state("sensor.smart_meter_ts_65a_3_current_phase_1", 1.701) - assert_state("sensor.smart_meter_ts_65a_3_current_phase_2", 1.832) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power_phase_1", 319.5) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_1", 229.4) - assert_state("sensor.smart_meter_ts_65a_3_real_power_phase_2", 150.0) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_3_1", 394.3) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_2", 225.6) - assert_state("sensor.smart_meter_ts_65a_3_reactive_energy_consumed", 5482.0) - assert_state("sensor.smart_meter_ts_65a_3_real_energy_plus", 1247204.0) - assert_state("sensor.smart_meter_ts_65a_3_power_factor_phase_1", 0.995) - assert_state("sensor.smart_meter_ts_65a_3_power_factor_phase_3", 0.163) - assert_state("sensor.smart_meter_ts_65a_3_power_factor_phase_2", 0.389) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power_phase_1", -31.3) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power_phase_3", -116.7) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_1_2", 396.0) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_2_3", 393.0) - assert_state("sensor.smart_meter_ts_65a_3_reactive_power_phase_2", -353.4) - 
assert_state("sensor.smart_meter_ts_65a_3_real_power_phase_1", 317.9) - assert_state("sensor.smart_meter_ts_65a_3_voltage_phase_3", 228.3) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power", 821.9) - assert_state("sensor.smart_meter_ts_65a_3_apparent_power_phase_3", 118.4) - # ohmpilot - assert_state("sensor.ohmpilot_energy_consumed", 1233295.0) - assert_state("sensor.ohmpilot_power", 0.0) - assert_state("sensor.ohmpilot_temperature", 38.9) - assert_state("sensor.ohmpilot_state_code", 0.0) - assert_state("sensor.ohmpilot_state_message", "up_and_running") - # power_flow - assert_state("sensor.solarnet_power_grid", 2274.9) - assert_state("sensor.solarnet_power_battery", 0.1591) - assert_state("sensor.solarnet_power_battery_charge", 0) - assert_state("sensor.solarnet_power_battery_discharge", 0.1591) - assert_state("sensor.solarnet_power_load", -2459.3092) - assert_state("sensor.solarnet_relative_self_consumption", 100.0) - assert_state("sensor.solarnet_power_photovoltaics", 216.4328) - assert_state("sensor.solarnet_relative_autonomy", 7.4984) - assert_state("sensor.solarnet_meter_mode", "bidirectional") - assert_state("sensor.solarnet_total_energy", 7512664.4042) - # storage - assert_state("sensor.byd_battery_box_premium_hv_dc_current", 0.0) - assert_state("sensor.byd_battery_box_premium_hv_state_of_charge", 4.6) - assert_state("sensor.byd_battery_box_premium_hv_maximum_capacity", 16588) - assert_state("sensor.byd_battery_box_premium_hv_temperature", 21.5) - assert_state("sensor.byd_battery_box_premium_hv_designed_capacity", 16588) - assert_state("sensor.byd_battery_box_premium_hv_dc_voltage", 0.0) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Devices solar_net = device_registry.async_get_device( @@ -507,11 +348,14 @@ async def test_gen24_storage( assert storage.name == "BYD Battery-Box Premium HV" +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_primo_s0( hass: HomeAssistant, 
aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, freezer: FrozenDateTimeFactory, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test Fronius Primo dual inverter with S0 meter entities.""" @@ -523,64 +367,8 @@ async def test_primo_s0( mock_responses(aioclient_mock, fixture_set="primo_s0", inverter_ids=[1, 2]) config_entry = await setup_fronius_integration(hass, is_logger=True) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 31 - await enable_all_entities( - hass, - freezer, - config_entry.entry_id, - FroniusMeterUpdateCoordinator.default_interval, - ) assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 47 - # logger - assert_state("sensor.solarnet_grid_export_tariff", 1) - assert_state("sensor.solarnet_co2_factor", 0.53) - assert_state("sensor.solarnet_grid_import_tariff", 1) - # inverter 1 - assert_state("sensor.primo_5_0_1_total_energy", 17114940) - assert_state("sensor.primo_5_0_1_energy_day", 22504) - assert_state("sensor.primo_5_0_1_dc_voltage", 452.3) - assert_state("sensor.primo_5_0_1_ac_power", 862) - assert_state("sensor.primo_5_0_1_error_code", 0) - assert_state("sensor.primo_5_0_1_dc_current", 4.23) - assert_state("sensor.primo_5_0_1_status_code", 7) - assert_state("sensor.primo_5_0_1_status_message", "running") - assert_state("sensor.primo_5_0_1_energy_year", 7532755.5) - assert_state("sensor.primo_5_0_1_ac_current", 3.85) - assert_state("sensor.primo_5_0_1_ac_voltage", 223.9) - assert_state("sensor.primo_5_0_1_frequency", 60) - assert_state("sensor.primo_5_0_1_led_color", 2) - assert_state("sensor.primo_5_0_1_led_state", 0) - # inverter 2 - assert_state("sensor.primo_3_0_1_total_energy", 5796010) - assert_state("sensor.primo_3_0_1_energy_day", 14237) - assert_state("sensor.primo_3_0_1_dc_voltage", 329.5) - assert_state("sensor.primo_3_0_1_ac_power", 296) - assert_state("sensor.primo_3_0_1_error_code", 0) - assert_state("sensor.primo_3_0_1_dc_current", 0.97) - 
assert_state("sensor.primo_3_0_1_status_code", 7) - assert_state("sensor.primo_3_0_1_status_message", "running") - assert_state("sensor.primo_3_0_1_energy_year", 3596193.25) - assert_state("sensor.primo_3_0_1_ac_current", 1.32) - assert_state("sensor.primo_3_0_1_ac_voltage", 223.6) - assert_state("sensor.primo_3_0_1_frequency", 60.01) - assert_state("sensor.primo_3_0_1_led_color", 2) - assert_state("sensor.primo_3_0_1_led_state", 0) - # meter - assert_state("sensor.s0_meter_at_inverter_1_meter_location", 1) - assert_state( - "sensor.s0_meter_at_inverter_1_meter_location_description", "consumption_path" - ) - assert_state("sensor.s0_meter_at_inverter_1_real_power", -2216.7487) - # power_flow - assert_state("sensor.solarnet_power_load", -2218.9349) - assert_state("sensor.solarnet_meter_mode", "vague-meter") - assert_state("sensor.solarnet_power_photovoltaics", 1834) - assert_state("sensor.solarnet_power_grid", 384.9349) - assert_state("sensor.solarnet_relative_self_consumption", 100) - assert_state("sensor.solarnet_relative_autonomy", 82.6523) - assert_state("sensor.solarnet_total_energy", 22910919.5) - assert_state("sensor.solarnet_energy_day", 36724) - assert_state("sensor.solarnet_energy_year", 11128933.25) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Devices solar_net = device_registry.async_get_device( From 6ca5f3e82874d155c2a0cb4c34459d109bd9fa9c Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sun, 15 Dec 2024 10:42:22 -0800 Subject: [PATCH 279/677] Mark Google Tasks `test-before-setup` quality scale rule as `done` (#133298) --- homeassistant/components/google_tasks/quality_scale.yaml | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/homeassistant/components/google_tasks/quality_scale.yaml b/homeassistant/components/google_tasks/quality_scale.yaml index 0cecb88484f..671b744d080 100644 --- a/homeassistant/components/google_tasks/quality_scale.yaml +++ 
b/homeassistant/components/google_tasks/quality_scale.yaml @@ -20,12 +20,7 @@ rules: entity-unique-id: done docs-installation-instructions: done docs-removal-instructions: todo - test-before-setup: - status: todo - comment: | - The integration refreshes the access token, but does not poll the API. The - setup can be changed to request the list of todo lists in setup instead - of during platform setup. + test-before-setup: done docs-high-level-description: done config-flow-test-coverage: done docs-actions: From 2003fc7ae0ffc336e94933a65915ca026b5d8145 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Sun, 15 Dec 2024 19:42:54 +0100 Subject: [PATCH 280/677] Adjust MQTT tests not to assert on deprecated color_temp attribute (#133198) --- tests/components/mqtt/test_light.py | 28 +++++++-------- tests/components/mqtt/test_light_json.py | 38 ++++++++++---------- tests/components/mqtt/test_light_template.py | 20 +++++------ 3 files changed, 43 insertions(+), 43 deletions(-) diff --git a/tests/components/mqtt/test_light.py b/tests/components/mqtt/test_light.py index ed4b16e3d0c..dbca09e803c 100644 --- a/tests/components/mqtt/test_light.py +++ b/tests/components/mqtt/test_light.py @@ -270,7 +270,7 @@ async def test_no_color_brightness_color_temp_hs_white_xy_if_no_topics( assert state.state == STATE_UNKNOWN assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None assert state.attributes.get("rgbw_color") is None @@ -285,7 +285,7 @@ async def test_no_color_brightness_color_temp_hs_white_xy_if_no_topics( assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert 
state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None assert state.attributes.get("rgbw_color") is None @@ -350,7 +350,7 @@ async def test_controlling_state_via_topic( assert state.state == STATE_UNKNOWN assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -366,7 +366,7 @@ async def test_controlling_state_via_topic( assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -649,7 +649,7 @@ async def test_invalid_state_via_topic( assert state.attributes.get("rgbw_color") is None assert state.attributes.get("rgbww_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("xy_color") is None @@ -665,7 +665,7 @@ async def test_invalid_state_via_topic( assert state.state == STATE_ON assert state.attributes.get("rgb_color") == (255, 255, 255) assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") == "none" assert state.attributes.get("hs_color") == (0, 0) assert 
state.attributes.get("xy_color") == (0.323, 0.329) @@ -723,14 +723,14 @@ async def test_invalid_state_via_topic( assert state.state == STATE_ON assert state.attributes.get("rgb_color") == (255, 255, 251) assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") == 153 + assert state.attributes.get("color_temp_kelvin") == 6535 assert state.attributes.get("effect") == "none" assert state.attributes.get("hs_color") == (54.768, 1.6) assert state.attributes.get("xy_color") == (0.325, 0.333) async_fire_mqtt_message(hass, "test_light_rgb/color_temp/status", "") light_state = hass.states.get("light.test") - assert light_state.attributes["color_temp"] == 153 + assert light_state.attributes["color_temp_kelvin"] == 6535 @pytest.mark.parametrize( @@ -939,7 +939,7 @@ async def test_controlling_state_via_topic_with_templates( hass, "test_light_rgb/color_temp/status", '{"hello": "300"}' ) state = hass.states.get("light.test") - assert state.attributes.get("color_temp") == 300 + assert state.attributes.get("color_temp_kelvin") == 3333 assert state.attributes.get(light.ATTR_COLOR_MODE) == "color_temp" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes @@ -1160,7 +1160,7 @@ async def test_sending_mqtt_commands_and_optimistic( state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("brightness") == 60 - assert state.attributes.get("color_temp") == 125 + assert state.attributes.get("color_temp_kelvin") == 8000 assert state.attributes.get(light.ATTR_COLOR_MODE) == "color_temp" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes @@ -2103,7 +2103,7 @@ async def test_explicit_color_mode( assert state.state == STATE_UNKNOWN assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert 
state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -2119,7 +2119,7 @@ async def test_explicit_color_mode( assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -2248,7 +2248,7 @@ async def test_explicit_color_mode_templated( state = hass.states.get("light.test") assert state.state == STATE_UNKNOWN assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("hs_color") is None assert state.attributes.get(light.ATTR_COLOR_MODE) is None assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes @@ -2258,7 +2258,7 @@ async def test_explicit_color_mode_templated( state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("hs_color") is None assert state.attributes.get(light.ATTR_COLOR_MODE) == "unknown" assert state.attributes.get(light.ATTR_SUPPORTED_COLOR_MODES) == color_modes diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index c6032678a47..988cce85653 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -456,7 +456,7 @@ async def test_turn_on_with_unknown_color_mode_optimistic( state = hass.states.get("light.test") assert state.attributes.get("color_mode") == light.ColorMode.UNKNOWN assert 
state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.state == STATE_ON # Turn on the light with brightness or color_temp attributes @@ -466,7 +466,7 @@ async def test_turn_on_with_unknown_color_mode_optimistic( state = hass.states.get("light.test") assert state.attributes.get("color_mode") == light.ColorMode.COLOR_TEMP assert state.attributes.get("brightness") == 50 - assert state.attributes.get("color_temp") == 192 + assert state.attributes.get("color_temp_kelvin") == 5208 assert state.state == STATE_ON @@ -571,7 +571,7 @@ async def test_no_color_brightness_color_temp_if_no_topics( assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("xy_color") is None assert state.attributes.get("hs_color") is None @@ -582,7 +582,7 @@ async def test_no_color_brightness_color_temp_if_no_topics( assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("xy_color") is None assert state.attributes.get("hs_color") is None @@ -636,7 +636,7 @@ async def test_controlling_state_via_topic( assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert 
state.attributes.get("xy_color") is None assert state.attributes.get("hs_color") is None @@ -657,7 +657,7 @@ async def test_controlling_state_via_topic( assert state.state == STATE_ON assert state.attributes.get("rgb_color") == (255, 255, 255) assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") is None # rgb color has priority + assert state.attributes.get("color_temp_kelvin") is None # rgb color has priority assert state.attributes.get("effect") == "colorloop" assert state.attributes.get("xy_color") == (0.323, 0.329) assert state.attributes.get("hs_color") == (0.0, 0.0) @@ -681,7 +681,7 @@ async def test_controlling_state_via_topic( 249, ) # temp converted to color assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") == 155 + assert state.attributes.get("color_temp_kelvin") == 6451 assert state.attributes.get("effect") == "colorloop" assert state.attributes.get("xy_color") == (0.328, 0.333) # temp converted to color assert state.attributes.get("hs_color") == (44.098, 2.43) # temp converted to color @@ -798,7 +798,7 @@ async def test_controlling_state_via_topic2( assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features assert state.attributes.get("brightness") is None assert state.attributes.get("color_mode") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -824,7 +824,7 @@ async def test_controlling_state_via_topic2( assert state.state == STATE_ON assert state.attributes.get("brightness") == 255 assert state.attributes.get("color_mode") == "rgbww" - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") == "colorloop" assert state.attributes.get("hs_color") == 
(20.552, 70.98) assert state.attributes.get("rgb_color") == (255, 136, 74) @@ -890,7 +890,7 @@ async def test_controlling_state_via_topic2( ) state = hass.states.get("light.test") assert state.attributes.get("color_mode") == "color_temp" - assert state.attributes.get("color_temp") == 155 + assert state.attributes.get("color_temp_kelvin") == 6451 # White async_fire_mqtt_message( @@ -969,7 +969,7 @@ async def test_controlling_the_state_with_legacy_color_handling( assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features assert state.attributes.get("brightness") is None assert state.attributes.get("color_mode") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -994,7 +994,7 @@ async def test_controlling_the_state_with_legacy_color_handling( assert state.state == STATE_ON assert state.attributes.get("brightness") == 255 assert state.attributes.get("color_mode") == "hs" - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") == (15.765, 100.0) assert state.attributes.get("rgb_color") == (255, 67, 0) @@ -1016,7 +1016,7 @@ async def test_controlling_the_state_with_legacy_color_handling( assert state.state == STATE_ON assert state.attributes.get("brightness") == 255 assert state.attributes.get("color_mode") == "color_temp" - assert state.attributes.get("color_temp") == 353 + assert state.attributes.get("color_temp_kelvin") == 2832 assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") == (28.125, 61.661) assert state.attributes.get("rgb_color") == (255, 171, 98) @@ -1099,7 +1099,7 @@ async def test_sending_mqtt_commands_and_optimistic( state = hass.states.get("light.test") 
assert state.state == STATE_ON assert state.attributes.get("color_mode") == light.ColorMode.COLOR_TEMP - assert state.attributes.get("color_temp") == 90 + assert state.attributes.get("color_temp_kelvin") == 11111 await common.async_turn_off(hass, "light.test") @@ -1227,7 +1227,7 @@ async def test_sending_mqtt_commands_and_optimistic2( assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features assert state.attributes.get("brightness") == 95 assert state.attributes.get("color_mode") == "rgb" - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") == "random" assert state.attributes.get("hs_color") is None assert state.attributes.get("rgb_color") is None @@ -2200,7 +2200,7 @@ async def test_invalid_values( assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == expected_features assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert not state.attributes.get(ATTR_ASSUMED_STATE) # Turn on the light @@ -2218,7 +2218,7 @@ async def test_invalid_values( assert state.state == STATE_ON assert state.attributes.get("rgb_color") == (255, 255, 255) assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None # Empty color value async_fire_mqtt_message( hass, @@ -2283,7 +2283,7 @@ async def test_invalid_values( ) state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("color_temp") == 100 + assert state.attributes.get("color_temp_kelvin") == 10000 # Bad color temperature async_fire_mqtt_message( @@ -2297,7 +2297,7 @@ async def test_invalid_values( # Color temperature should not have changed state = hass.states.get("light.test") assert state.state == STATE_ON - assert 
state.attributes.get("color_temp") == 100 + assert state.attributes.get("color_temp_kelvin") == 10000 @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) diff --git a/tests/components/mqtt/test_light_template.py b/tests/components/mqtt/test_light_template.py index 5ffff578b5b..4d2b93ff159 100644 --- a/tests/components/mqtt/test_light_template.py +++ b/tests/components/mqtt/test_light_template.py @@ -252,7 +252,7 @@ async def test_state_change_via_topic( assert state.state == STATE_UNKNOWN assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "test_light_rgb", "on") @@ -261,7 +261,7 @@ async def test_state_change_via_topic( assert state.state == STATE_ON assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None async_fire_mqtt_message(hass, "test_light_rgb", "off") @@ -316,7 +316,7 @@ async def test_state_brightness_color_effect_temp_change_via_topic( assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None assert state.attributes.get("effect") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert not state.attributes.get(ATTR_ASSUMED_STATE) # turn on the light @@ -326,7 +326,7 @@ async def test_state_brightness_color_effect_temp_change_via_topic( assert state.state == STATE_ON assert state.attributes.get("rgb_color") == (255, 128, 64) assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") is None # rgb color has priority + assert state.attributes.get("color_temp_kelvin") is None # rgb color has priority assert 
state.attributes.get("effect") is None # turn on the light @@ -340,7 +340,7 @@ async def test_state_brightness_color_effect_temp_change_via_topic( 255, ) # temp converted to color assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") == 145 + assert state.attributes.get("color_temp_kelvin") == 6896 assert state.attributes.get("effect") is None assert state.attributes.get("xy_color") == (0.317, 0.317) # temp converted to color assert state.attributes.get("hs_color") == ( @@ -472,7 +472,7 @@ async def test_sending_mqtt_commands_and_optimistic( mqtt_mock.async_publish.reset_mock() state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("color_temp") == 70 + assert state.attributes.get("color_temp_kelvin") == 14285 # Set full brightness await common.async_turn_on(hass, "light.test", brightness=255) @@ -848,7 +848,7 @@ async def test_invalid_values( assert state.state == STATE_UNKNOWN assert state.attributes.get("rgb_color") is None assert state.attributes.get("brightness") is None - assert state.attributes.get("color_temp") is None + assert state.attributes.get("color_temp_kelvin") is None assert state.attributes.get("effect") is None assert not state.attributes.get(ATTR_ASSUMED_STATE) @@ -858,7 +858,7 @@ async def test_invalid_values( state = hass.states.get("light.test") assert state.state == STATE_ON assert state.attributes.get("brightness") == 255 - assert state.attributes.get("color_temp") is None # hs_color has priority + assert state.attributes.get("color_temp_kelvin") is None # hs_color has priority assert state.attributes.get("rgb_color") == (255, 255, 255) assert state.attributes.get("effect") == "rainbow" @@ -887,14 +887,14 @@ async def test_invalid_values( async_fire_mqtt_message(hass, "test_light_rgb", "on,,215,None-None-None") state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("color_temp") == 215 + assert 
state.attributes.get("color_temp_kelvin") == 4651 # bad color temp values async_fire_mqtt_message(hass, "test_light_rgb", "on,,off,") # color temp should not have changed state = hass.states.get("light.test") - assert state.attributes.get("color_temp") == 215 + assert state.attributes.get("color_temp_kelvin") == 4651 # bad effect value async_fire_mqtt_message(hass, "test_light_rgb", "on,255,a-b-c,white") From 81c12db6cd5cb772ea2579e56d5c319fdab8eb15 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Sun, 15 Dec 2024 20:19:56 +0100 Subject: [PATCH 281/677] Fix missing Fronius data_description translation for reconfigure flow (#133304) --- homeassistant/components/fronius/strings.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/homeassistant/components/fronius/strings.json b/homeassistant/components/fronius/strings.json index 9a2b498f28c..51cb087efc2 100644 --- a/homeassistant/components/fronius/strings.json +++ b/homeassistant/components/fronius/strings.json @@ -18,6 +18,9 @@ "description": "Update your configuration information for {device}.", "data": { "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "[%key:component::fronius::config::step::user::data_description::host%]" } } }, From b77e42e8f3482a772fe84833d23dc9c985fbf6c3 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sun, 15 Dec 2024 11:23:56 -0800 Subject: [PATCH 282/677] Increase test coverage for google tasks init (#133252) --- .../components/google_tasks/quality_scale.yaml | 8 ++------ tests/components/google_tasks/test_init.py | 17 ++++++++++++++--- 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/google_tasks/quality_scale.yaml b/homeassistant/components/google_tasks/quality_scale.yaml index 671b744d080..79d216709e5 100644 --- a/homeassistant/components/google_tasks/quality_scale.yaml +++ b/homeassistant/components/google_tasks/quality_scale.yaml @@ -31,16 +31,12 @@ rules: # Silver log-when-unavailable: done 
config-entry-unloading: done - reauthentication-flow: - status: todo - comment: Missing a test that reauthenticates with the wrong account + reauthentication-flow: done action-exceptions: done docs-installation-parameters: todo integration-owner: done parallel-updates: done - test-coverage: - status: todo - comment: Test coverage for __init__.py is not above 95% yet + test-coverage: done docs-configuration-parameters: todo entity-unavailable: done diff --git a/tests/components/google_tasks/test_init.py b/tests/components/google_tasks/test_init.py index 4bb2bd1eed7..9ad8c887a66 100644 --- a/tests/components/google_tasks/test_init.py +++ b/tests/components/google_tasks/test_init.py @@ -6,6 +6,7 @@ from http import HTTPStatus import time from unittest.mock import Mock +from aiohttp import ClientError from httplib2 import Response import pytest @@ -72,20 +73,28 @@ async def test_expired_token_refresh_success( @pytest.mark.parametrize( - ("expires_at", "status", "expected_state"), + ("expires_at", "status", "exc", "expected_state"), [ ( time.time() - 3600, http.HTTPStatus.UNAUTHORIZED, + None, ConfigEntryState.SETUP_ERROR, ), ( time.time() - 3600, http.HTTPStatus.INTERNAL_SERVER_ERROR, + None, + ConfigEntryState.SETUP_RETRY, + ), + ( + time.time() - 3600, + None, + ClientError("error"), ConfigEntryState.SETUP_RETRY, ), ], - ids=["unauthorized", "internal_server_error"], + ids=["unauthorized", "internal_server_error", "client_error"], ) async def test_expired_token_refresh_failure( hass: HomeAssistant, @@ -93,7 +102,8 @@ async def test_expired_token_refresh_failure( aioclient_mock: AiohttpClientMocker, config_entry: MockConfigEntry, setup_credentials: None, - status: http.HTTPStatus, + status: http.HTTPStatus | None, + exc: Exception | None, expected_state: ConfigEntryState, ) -> None: """Test failure while refreshing token with a transient error.""" @@ -102,6 +112,7 @@ async def test_expired_token_refresh_failure( aioclient_mock.post( OAUTH2_TOKEN, status=status, + 
exc=exc, ) await integration_setup() From 5cc8d9e10509a699c00922fd05aad47739ca3492 Mon Sep 17 00:00:00 2001 From: Simone Chemelli Date: Sun, 15 Dec 2024 14:27:19 -0500 Subject: [PATCH 283/677] Full test coverage for Vodafone Station button platform (#133281) --- tests/components/vodafone_station/const.py | 6 +- .../vodafone_station/test_button.py | 56 +++++++++++++++++++ 2 files changed, 60 insertions(+), 2 deletions(-) create mode 100644 tests/components/vodafone_station/test_button.py diff --git a/tests/components/vodafone_station/const.py b/tests/components/vodafone_station/const.py index 9adf32b339d..fc6bbd01398 100644 --- a/tests/components/vodafone_station/const.py +++ b/tests/components/vodafone_station/const.py @@ -29,11 +29,13 @@ DEVICE_DATA_QUERY = { mac="xx:xx:xx:xx:xx:xx", type="laptop", wifi="2.4G", - ) + ), } +SERIAL = "m123456789" + SENSOR_DATA_QUERY = { - "sys_serial_number": "M123456789", + "sys_serial_number": SERIAL, "sys_firmware_version": "XF6_4.0.05.04", "sys_bootloader_version": "0220", "sys_hardware_version": "RHG3006 v1", diff --git a/tests/components/vodafone_station/test_button.py b/tests/components/vodafone_station/test_button.py new file mode 100644 index 00000000000..8b9b0753caa --- /dev/null +++ b/tests/components/vodafone_station/test_button.py @@ -0,0 +1,56 @@ +"""Tests for Vodafone Station button platform.""" + +from unittest.mock import patch + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.vodafone_station.const import DOMAIN +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_registry import EntityRegistry + +from .const import DEVICE_DATA_QUERY, MOCK_USER_DATA, SENSOR_DATA_QUERY, SERIAL + +from tests.common import MockConfigEntry + + +async def test_button(hass: HomeAssistant, entity_registry: EntityRegistry) -> None: + """Test device restart button.""" + + entry = 
MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) + entry.add_to_hass(hass) + + with ( + patch("aiovodafone.api.VodafoneStationSercommApi.login"), + patch( + "aiovodafone.api.VodafoneStationSercommApi.get_devices_data", + return_value=DEVICE_DATA_QUERY, + ), + patch( + "aiovodafone.api.VodafoneStationSercommApi.get_sensor_data", + return_value=SENSOR_DATA_QUERY, + ), + patch( + "aiovodafone.api.VodafoneStationSercommApi.restart_router", + ) as mock_router_restart, + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + entity_id = f"button.vodafone_station_{SERIAL}_restart" + + # restart button + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_UNKNOWN + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"{SERIAL}_reboot" + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + assert mock_router_restart.call_count == 1 From 89387760d3b6eb46e0c8001b87ff0eb1564758b0 Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Sun, 15 Dec 2024 20:44:28 +0100 Subject: [PATCH 284/677] Cleanup tests for tedee (#133306) --- tests/components/tedee/__init__.py | 13 + tests/components/tedee/conftest.py | 6 +- .../tedee/snapshots/test_binary_sensor.ambr | 278 +++++++++++++++--- .../components/tedee/snapshots/test_init.ambr | 32 ++ .../components/tedee/snapshots/test_lock.ambr | 173 ++++++----- .../tedee/snapshots/test_sensor.ambr | 140 +++++++-- tests/components/tedee/test_binary_sensor.py | 19 +- tests/components/tedee/test_init.py | 52 ++-- tests/components/tedee/test_lock.py | 54 ++-- tests/components/tedee/test_sensor.py | 21 +- 10 files changed, 567 insertions(+), 221 deletions(-) diff --git a/tests/components/tedee/__init__.py b/tests/components/tedee/__init__.py index a72b1fbdd6a..0bff030d2df 100644 --- a/tests/components/tedee/__init__.py +++ b/tests/components/tedee/__init__.py @@ -1 +1,14 @@ 
"""Add tests for Tedee components.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Set up the acaia integration for testing.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/tedee/conftest.py b/tests/components/tedee/conftest.py index 8e028cb5300..d659560ee61 100644 --- a/tests/components/tedee/conftest.py +++ b/tests/components/tedee/conftest.py @@ -14,6 +14,8 @@ from homeassistant.components.tedee.const import CONF_LOCAL_ACCESS_TOKEN, DOMAIN from homeassistant.const import CONF_HOST, CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant +from . import setup_integration + from tests.common import MockConfigEntry, load_fixture WEBHOOK_ID = "bq33efxmdi3vxy55q2wbnudbra7iv8mjrq9x0gea33g4zqtd87093pwveg8xcb33" @@ -84,8 +86,6 @@ async def init_integration( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_tedee: MagicMock ) -> MockConfigEntry: """Set up the Tedee integration for testing.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) return mock_config_entry diff --git a/tests/components/tedee/snapshots/test_binary_sensor.ambr b/tests/components/tedee/snapshots/test_binary_sensor.ambr index 385e4ac9bc1..e3238dacda1 100644 --- a/tests/components/tedee/snapshots/test_binary_sensor.ambr +++ b/tests/components/tedee/snapshots/test_binary_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_binary_sensors[entry-charging] +# name: test_binary_sensors[binary_sensor.lock_1a2b_charging-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -32,7 +32,21 @@ 'unit_of_measurement': None, }) # --- -# name: 
test_binary_sensors[entry-lock_uncalibrated] +# name: test_binary_sensors[binary_sensor.lock_1a2b_charging-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery_charging', + 'friendly_name': 'Lock-1A2B Charging', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_1a2b_charging', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_1a2b_lock_uncalibrated-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -65,7 +79,21 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[entry-pullspring_enabled] +# name: test_binary_sensors[binary_sensor.lock_1a2b_lock_uncalibrated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Lock-1A2B Lock uncalibrated', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_1a2b_lock_uncalibrated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_1a2b_pullspring_enabled-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -98,7 +126,20 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[entry-semi_locked] +# name: test_binary_sensors[binary_sensor.lock_1a2b_pullspring_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Lock-1A2B Pullspring enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_1a2b_pullspring_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_1a2b_semi_locked-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -131,48 +172,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[state-charging] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery_charging', - 'friendly_name': 'Lock-1A2B Charging', - }), - 'context': , - 
'entity_id': 'binary_sensor.lock_1a2b_charging', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[state-lock_uncalibrated] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Lock-1A2B Lock uncalibrated', - }), - 'context': , - 'entity_id': 'binary_sensor.lock_1a2b_lock_uncalibrated', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[state-pullspring_enabled] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Lock-1A2B Pullspring enabled', - }), - 'context': , - 'entity_id': 'binary_sensor.lock_1a2b_pullspring_enabled', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[state-semi_locked] +# name: test_binary_sensors[binary_sensor.lock_1a2b_semi_locked-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Lock-1A2B Semi locked', @@ -185,3 +185,189 @@ 'state': 'off', }) # --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_charging-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.lock_2c3d_charging', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charging', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '98765-charging', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_charging-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery_charging', + 
'friendly_name': 'Lock-2C3D Charging', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_2c3d_charging', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_lock_uncalibrated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.lock_2c3d_lock_uncalibrated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lock uncalibrated', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'uncalibrated', + 'unique_id': '98765-uncalibrated', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_lock_uncalibrated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Lock-2C3D Lock uncalibrated', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_2c3d_lock_uncalibrated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_pullspring_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.lock_2c3d_pullspring_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pullspring 
enabled', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pullspring_enabled', + 'unique_id': '98765-pullspring_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_pullspring_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Lock-2C3D Pullspring enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_2c3d_pullspring_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_semi_locked-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.lock_2c3d_semi_locked', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Semi locked', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'semi_locked', + 'unique_id': '98765-semi_locked', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.lock_2c3d_semi_locked-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Lock-2C3D Semi locked', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_2c3d_semi_locked', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/tedee/snapshots/test_init.ambr b/tests/components/tedee/snapshots/test_init.ambr index 20d6bfcdc2a..af559f561b2 100644 --- a/tests/components/tedee/snapshots/test_init.ambr +++ b/tests/components/tedee/snapshots/test_init.ambr @@ -31,3 +31,35 @@ 'via_device_id': None, }) # 
--- +# name: test_lock_device + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'tedee', + '12345', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Tedee', + 'model': 'Tedee PRO', + 'model_id': 'Tedee PRO', + 'name': 'Lock-1A2B', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': , + }) +# --- diff --git a/tests/components/tedee/snapshots/test_lock.ambr b/tests/components/tedee/snapshots/test_lock.ambr index 3eba6f3f0af..cca988663d2 100644 --- a/tests/components/tedee/snapshots/test_lock.ambr +++ b/tests/components/tedee/snapshots/test_lock.ambr @@ -1,83 +1,4 @@ # serializer version: 1 -# name: test_lock - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Lock-1A2B', - 'supported_features': , - }), - 'context': , - 'entity_id': 'lock.lock_1a2b', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unlocked', - }) -# --- -# name: test_lock.1 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'lock', - 'entity_category': None, - 'entity_id': 'lock.lock_1a2b', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'tedee', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '12345-lock', - 'unit_of_measurement': None, - }) -# --- -# name: test_lock.2 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 
'connections': set({ - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'tedee', - '12345', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Tedee', - 'model': 'Tedee PRO', - 'model_id': 'Tedee PRO', - 'name': 'Lock-1A2B', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': , - }) -# --- # name: test_lock_without_pullspring StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -157,3 +78,97 @@ 'via_device_id': , }) # --- +# name: test_locks[lock.lock_1a2b-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'lock', + 'entity_category': None, + 'entity_id': 'lock.lock_1a2b', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '12345-lock', + 'unit_of_measurement': None, + }) +# --- +# name: test_locks[lock.lock_1a2b-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Lock-1A2B', + 'supported_features': , + }), + 'context': , + 'entity_id': 'lock.lock_1a2b', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unlocked', + }) +# --- +# name: test_locks[lock.lock_2c3d-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'lock', + 'entity_category': None, + 'entity_id': 'lock.lock_2c3d', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '98765-lock', + 'unit_of_measurement': None, + }) +# --- +# name: test_locks[lock.lock_2c3d-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Lock-2C3D', + 'supported_features': , + }), + 'context': , + 'entity_id': 'lock.lock_2c3d', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unlocked', + }) +# --- diff --git a/tests/components/tedee/snapshots/test_sensor.ambr b/tests/components/tedee/snapshots/test_sensor.ambr index d5f4c8361c3..297fe9b0d37 100644 --- a/tests/components/tedee/snapshots/test_sensor.ambr +++ b/tests/components/tedee/snapshots/test_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_sensors[entry-battery] +# name: test_sensors[sensor.lock_1a2b_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -34,7 +34,23 @@ 'unit_of_measurement': '%', }) # --- -# name: test_sensors[entry-pullspring_duration] +# name: test_sensors[sensor.lock_1a2b_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Lock-1A2B Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.lock_1a2b_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '70', + }) +# --- +# name: test_sensors[sensor.lock_1a2b_pullspring_duration-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -69,23 +85,7 @@ 'unit_of_measurement': , }) # --- -# name: test_sensors[state-battery] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Lock-1A2B Battery', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.lock_1a2b_battery', - 
'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '70', - }) -# --- -# name: test_sensors[state-pullspring_duration] +# name: test_sensors[sensor.lock_1a2b_pullspring_duration-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', @@ -101,3 +101,105 @@ 'state': '2', }) # --- +# name: test_sensors[sensor.lock_2c3d_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.lock_2c3d_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '98765-battery_sensor', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.lock_2c3d_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Lock-2C3D Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.lock_2c3d_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '70', + }) +# --- +# name: test_sensors[sensor.lock_2c3d_pullspring_duration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.lock_2c3d_pullspring_duration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Pullspring duration', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pullspring_duration', + 'unique_id': '98765-pullspring_duration', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.lock_2c3d_pullspring_duration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Lock-2C3D Pullspring duration', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.lock_2c3d_pullspring_duration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- diff --git a/tests/components/tedee/test_binary_sensor.py b/tests/components/tedee/test_binary_sensor.py index dfe70e7a2ea..ccfd12440ea 100644 --- a/tests/components/tedee/test_binary_sensor.py +++ b/tests/components/tedee/test_binary_sensor.py @@ -1,19 +1,20 @@ """Tests for the Tedee Binary Sensors.""" from datetime import timedelta -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch from aiotedee import TedeeLock from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from tests.common import async_fire_time_changed +from . 
import setup_integration -pytestmark = pytest.mark.usefixtures("init_integration") +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform BINARY_SENSORS = ("charging", "semi_locked", "pullspring_enabled", "lock_uncalibrated") @@ -22,21 +23,19 @@ BINARY_SENSORS = ("charging", "semi_locked", "pullspring_enabled", "lock_uncalib async def test_binary_sensors( hass: HomeAssistant, mock_tedee: MagicMock, + mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, ) -> None: """Test tedee binary sensor.""" - for key in BINARY_SENSORS: - state = hass.states.get(f"binary_sensor.lock_1a2b_{key}") - assert state - assert state == snapshot(name=f"state-{key}") + with patch("homeassistant.components.tedee.PLATFORMS", [Platform.BINARY_SENSOR]): + await setup_integration(hass, mock_config_entry) - entry = entity_registry.async_get(state.entity_id) - assert entry - assert entry == snapshot(name=f"entry-{key}") + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) @pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.usefixtures("init_integration") async def test_new_binary_sensors( hass: HomeAssistant, mock_tedee: MagicMock, diff --git a/tests/components/tedee/test_init.py b/tests/components/tedee/test_init.py index 63701bb1788..71bf5262f00 100644 --- a/tests/components/tedee/test_init.py +++ b/tests/components/tedee/test_init.py @@ -20,6 +20,7 @@ from homeassistant.const import CONF_HOST, CONF_WEBHOOK_ID, EVENT_HOMEASSISTANT_ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr +from . 
import setup_integration from .conftest import WEBHOOK_ID from tests.common import MockConfigEntry @@ -32,9 +33,7 @@ async def test_load_unload_config_entry( mock_tedee: MagicMock, ) -> None: """Test loading and unloading the integration.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -56,9 +55,7 @@ async def test_config_entry_not_ready( """Test the Tedee configuration entry not ready.""" mock_tedee.get_locks.side_effect = side_effect - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert len(mock_tedee.get_locks.mock_calls) == 1 assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY @@ -70,9 +67,7 @@ async def test_cleanup_on_shutdown( mock_tedee: MagicMock, ) -> None: """Test the webhook is cleaned up on shutdown.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -88,9 +83,7 @@ async def test_webhook_cleanup_errors( caplog: pytest.LogCaptureFixture, ) -> None: """Test the webhook is cleaned up on shutdown.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -110,9 +103,7 @@ async def test_webhook_registration_errors( ) -> None: """Test the webhook is cleaned up on shutdown.""" mock_tedee.register_webhook.side_effect = TedeeWebhookException("") - mock_config_entry.add_to_hass(hass) - await 
hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -128,9 +119,7 @@ async def test_webhook_registration_cleanup_errors( ) -> None: """Test the errors during webhook cleanup during registration.""" mock_tedee.cleanup_webhooks_by_host.side_effect = TedeeWebhookException("") - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -138,6 +127,21 @@ async def test_webhook_registration_cleanup_errors( assert "Failed to cleanup Tedee webhooks by host:" in caplog.text +async def test_lock_device( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_tedee: MagicMock, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Ensure the lock device is registered.""" + await setup_integration(hass, mock_config_entry) + + device = device_registry.async_get_device({(mock_config_entry.domain, "12345")}) + assert device + assert device == snapshot + + async def test_bridge_device( hass: HomeAssistant, mock_config_entry: MockConfigEntry, @@ -146,9 +150,7 @@ async def test_bridge_device( snapshot: SnapshotAssertion, ) -> None: """Ensure the bridge device is registered.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) device = device_registry.async_get_device( {(mock_config_entry.domain, mock_tedee.get_local_bridge.return_value.serial)} @@ -192,9 +194,7 @@ async def test_webhook_post( ) -> None: """Test webhook callback.""" - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await 
hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) client = await hass_client_no_auth() webhook_url = async_generate_url(hass, WEBHOOK_ID) @@ -241,9 +241,7 @@ async def test_migration( "homeassistant.components.tedee.webhook_generate_id", return_value=WEBHOOK_ID, ): - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) assert mock_config_entry.version == 1 assert mock_config_entry.minor_version == 2 diff --git a/tests/components/tedee/test_lock.py b/tests/components/tedee/test_lock.py index d84acb212ea..e0fe9673a46 100644 --- a/tests/components/tedee/test_lock.py +++ b/tests/components/tedee/test_lock.py @@ -1,7 +1,7 @@ """Tests for tedee lock.""" from datetime import timedelta -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch from urllib.parse import urlparse from aiotedee import TedeeLock, TedeeLockState @@ -22,43 +22,44 @@ from homeassistant.components.lock import ( LockState, ) from homeassistant.components.webhook import async_generate_url -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceNotSupported from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component +from . 
import setup_integration from .conftest import WEBHOOK_ID -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform from tests.typing import ClientSessionGenerator -pytestmark = pytest.mark.usefixtures("init_integration") - -async def test_lock( +async def test_locks( hass: HomeAssistant, mock_tedee: MagicMock, - device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, +) -> None: + """Test tedee locks.""" + with patch("homeassistant.components.tedee.PLATFORMS", [Platform.LOCK]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.usefixtures("init_integration") +async def test_lock_service_calls( + hass: HomeAssistant, + mock_tedee: MagicMock, ) -> None: """Test the tedee lock.""" - mock_tedee.lock.return_value = None - mock_tedee.unlock.return_value = None - mock_tedee.open.return_value = None - - state = hass.states.get("lock.lock_1a2b") - assert state - assert state == snapshot - - entry = entity_registry.async_get(state.entity_id) - assert entry - assert entry == snapshot - assert entry.device_id - - device = device_registry.async_get(entry.device_id) - assert device == snapshot await hass.services.async_call( LOCK_DOMAIN, @@ -106,6 +107,7 @@ async def test_lock( assert state.state == LockState.UNLOCKING +@pytest.mark.usefixtures("init_integration") async def test_lock_without_pullspring( hass: HomeAssistant, mock_tedee: MagicMock, @@ -116,9 +118,6 @@ async def test_lock_without_pullspring( """Test the tedee lock without pullspring.""" # Fetch translations await async_setup_component(hass, "homeassistant", {}) - mock_tedee.lock.return_value = None - mock_tedee.unlock.return_value = None - mock_tedee.open.return_value = None state = hass.states.get("lock.lock_2c3d") assert state @@ 
-149,6 +148,7 @@ async def test_lock_without_pullspring( assert len(mock_tedee.open.mock_calls) == 0 +@pytest.mark.usefixtures("init_integration") async def test_lock_errors( hass: HomeAssistant, mock_tedee: MagicMock, @@ -191,6 +191,7 @@ async def test_lock_errors( assert exc_info.value.translation_key == "open_failed" +@pytest.mark.usefixtures("init_integration") @pytest.mark.parametrize( "side_effect", [ @@ -217,6 +218,7 @@ async def test_update_failed( assert state.state == STATE_UNAVAILABLE +@pytest.mark.usefixtures("init_integration") async def test_cleanup_removed_locks( hass: HomeAssistant, mock_tedee: MagicMock, @@ -247,6 +249,7 @@ async def test_cleanup_removed_locks( assert "Lock-1A2B" not in locks +@pytest.mark.usefixtures("init_integration") async def test_new_lock( hass: HomeAssistant, mock_tedee: MagicMock, @@ -275,6 +278,7 @@ async def test_new_lock( assert state +@pytest.mark.usefixtures("init_integration") @pytest.mark.parametrize( ("lib_state", "expected_state"), [ diff --git a/tests/components/tedee/test_sensor.py b/tests/components/tedee/test_sensor.py index ddbcd5086af..3c03d340100 100644 --- a/tests/components/tedee/test_sensor.py +++ b/tests/components/tedee/test_sensor.py @@ -1,20 +1,20 @@ """Tests for the Tedee Sensors.""" from datetime import timedelta -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch from aiotedee import TedeeLock from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from tests.common import async_fire_time_changed - -pytestmark = pytest.mark.usefixtures("init_integration") +from . 
import setup_integration +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform SENSORS = ( "battery", @@ -25,21 +25,18 @@ SENSORS = ( async def test_sensors( hass: HomeAssistant, mock_tedee: MagicMock, + mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, ) -> None: """Test tedee sensors.""" - for key in SENSORS: - state = hass.states.get(f"sensor.lock_1a2b_{key}") - assert state - assert state == snapshot(name=f"state-{key}") + with patch("homeassistant.components.tedee.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) - entry = entity_registry.async_get(state.entity_id) - assert entry - assert entry.device_id - assert entry == snapshot(name=f"entry-{key}") + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) +@pytest.mark.usefixtures("init_integration") async def test_new_sensors( hass: HomeAssistant, mock_tedee: MagicMock, From 0030a970a19bbb430861a39bc3cd853bd0ff26bc Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Sun, 15 Dec 2024 21:31:18 +0100 Subject: [PATCH 285/677] Split coordinator in lamarzocco (#133208) --- .../components/lamarzocco/__init__.py | 34 +++-- .../components/lamarzocco/binary_sensor.py | 2 +- homeassistant/components/lamarzocco/button.py | 2 +- .../components/lamarzocco/calendar.py | 2 +- .../components/lamarzocco/coordinator.py | 130 +++++++++--------- .../components/lamarzocco/diagnostics.py | 2 +- homeassistant/components/lamarzocco/number.py | 2 +- homeassistant/components/lamarzocco/select.py | 2 +- homeassistant/components/lamarzocco/sensor.py | 56 +++++--- homeassistant/components/lamarzocco/switch.py | 2 +- homeassistant/components/lamarzocco/update.py | 2 +- tests/components/lamarzocco/conftest.py | 2 +- tests/components/lamarzocco/test_init.py | 4 +- 13 files changed, 138 insertions(+), 104 deletions(-) diff --git a/homeassistant/components/lamarzocco/__init__.py 
b/homeassistant/components/lamarzocco/__init__.py index b3021ef1543..d20616e1940 100644 --- a/homeassistant/components/lamarzocco/__init__.py +++ b/homeassistant/components/lamarzocco/__init__.py @@ -7,6 +7,7 @@ from pylamarzocco.clients.bluetooth import LaMarzoccoBluetoothClient from pylamarzocco.clients.cloud import LaMarzoccoCloudClient from pylamarzocco.clients.local import LaMarzoccoLocalClient from pylamarzocco.const import BT_MODEL_PREFIXES, FirmwareType +from pylamarzocco.devices.machine import LaMarzoccoMachine from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful from homeassistant.components.bluetooth import async_discovered_service_info @@ -25,7 +26,13 @@ from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.aiohttp_client import async_create_clientsession from .const import CONF_USE_BLUETOOTH, DOMAIN -from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator +from .coordinator import ( + LaMarzoccoConfigEntry, + LaMarzoccoConfigUpdateCoordinator, + LaMarzoccoFirmwareUpdateCoordinator, + LaMarzoccoRuntimeData, + LaMarzoccoStatisticsUpdateCoordinator, +) PLATFORMS = [ Platform.BINARY_SENSOR, @@ -99,18 +106,29 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) - address_or_ble_device=entry.data[CONF_MAC], ) - coordinator = LaMarzoccoUpdateCoordinator( - hass=hass, - entry=entry, - local_client=local_client, + device = LaMarzoccoMachine( + model=entry.data[CONF_MODEL], + serial_number=entry.unique_id, + name=entry.data[CONF_NAME], cloud_client=cloud_client, + local_client=local_client, bluetooth_client=bluetooth_client, ) - await coordinator.async_config_entry_first_refresh() - entry.runtime_data = coordinator + coordinators = LaMarzoccoRuntimeData( + LaMarzoccoConfigUpdateCoordinator(hass, entry, device, local_client), + LaMarzoccoFirmwareUpdateCoordinator(hass, entry, device), + LaMarzoccoStatisticsUpdateCoordinator(hass, entry, device), + ) - gateway_version = 
coordinator.device.firmware[FirmwareType.GATEWAY].current_version + # API does not like concurrent requests, so no asyncio.gather here + await coordinators.config_coordinator.async_config_entry_first_refresh() + await coordinators.firmware_coordinator.async_config_entry_first_refresh() + await coordinators.statistics_coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinators + + gateway_version = device.firmware[FirmwareType.GATEWAY].current_version if version.parse(gateway_version) < version.parse("v3.4-rc5"): # incompatible gateway firmware, create an issue ir.async_create_issue( diff --git a/homeassistant/components/lamarzocco/binary_sensor.py b/homeassistant/components/lamarzocco/binary_sensor.py index 0e11c54d896..3d11992e7c1 100644 --- a/homeassistant/components/lamarzocco/binary_sensor.py +++ b/homeassistant/components/lamarzocco/binary_sensor.py @@ -64,7 +64,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up binary sensor entities.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator async_add_entities( LaMarzoccoBinarySensorEntity(coordinator, description) diff --git a/homeassistant/components/lamarzocco/button.py b/homeassistant/components/lamarzocco/button.py index dabf01d817d..22e92f656ff 100644 --- a/homeassistant/components/lamarzocco/button.py +++ b/homeassistant/components/lamarzocco/button.py @@ -57,7 +57,7 @@ async def async_setup_entry( ) -> None: """Set up button entities.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator async_add_entities( LaMarzoccoButtonEntity(coordinator, description) for description in ENTITIES diff --git a/homeassistant/components/lamarzocco/calendar.py b/homeassistant/components/lamarzocco/calendar.py index 46bfe875c9f..1dcc7c324ac 100644 --- a/homeassistant/components/lamarzocco/calendar.py +++ b/homeassistant/components/lamarzocco/calendar.py @@ -36,7 +36,7 @@ async def 
async_setup_entry( ) -> None: """Set up switch entities and services.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator async_add_entities( LaMarzoccoCalendarEntity(coordinator, CALENDAR_KEY, wake_up_sleep_entry) for wake_up_sleep_entry in coordinator.device.config.wake_up_sleep_entries.values() diff --git a/homeassistant/components/lamarzocco/coordinator.py b/homeassistant/components/lamarzocco/coordinator.py index 1281b11db02..aca84fc4660 100644 --- a/homeassistant/components/lamarzocco/coordinator.py +++ b/homeassistant/components/lamarzocco/coordinator.py @@ -2,20 +2,18 @@ from __future__ import annotations -from collections.abc import Callable, Coroutine +from abc import abstractmethod +from dataclasses import dataclass from datetime import timedelta import logging -from time import time from typing import Any -from pylamarzocco.clients.bluetooth import LaMarzoccoBluetoothClient -from pylamarzocco.clients.cloud import LaMarzoccoCloudClient from pylamarzocco.clients.local import LaMarzoccoLocalClient from pylamarzocco.devices.machine import LaMarzoccoMachine from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_MODEL, CONF_NAME, EVENT_HOMEASSISTANT_STOP +from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -23,26 +21,35 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda from .const import DOMAIN SCAN_INTERVAL = timedelta(seconds=30) -FIRMWARE_UPDATE_INTERVAL = 3600 -STATISTICS_UPDATE_INTERVAL = 300 - +FIRMWARE_UPDATE_INTERVAL = timedelta(hours=1) +STATISTICS_UPDATE_INTERVAL = timedelta(minutes=5) _LOGGER = logging.getLogger(__name__) -type LaMarzoccoConfigEntry = 
ConfigEntry[LaMarzoccoUpdateCoordinator] + +@dataclass +class LaMarzoccoRuntimeData: + """Runtime data for La Marzocco.""" + + config_coordinator: LaMarzoccoConfigUpdateCoordinator + firmware_coordinator: LaMarzoccoFirmwareUpdateCoordinator + statistics_coordinator: LaMarzoccoStatisticsUpdateCoordinator + + +type LaMarzoccoConfigEntry = ConfigEntry[LaMarzoccoRuntimeData] class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): - """Class to handle fetching data from the La Marzocco API centrally.""" + """Base class for La Marzocco coordinators.""" + _default_update_interval = SCAN_INTERVAL config_entry: LaMarzoccoConfigEntry def __init__( self, hass: HomeAssistant, entry: LaMarzoccoConfigEntry, - cloud_client: LaMarzoccoCloudClient, - local_client: LaMarzoccoLocalClient | None, - bluetooth_client: LaMarzoccoBluetoothClient | None, + device: LaMarzoccoMachine, + local_client: LaMarzoccoLocalClient | None = None, ) -> None: """Initialize coordinator.""" super().__init__( @@ -50,24 +57,35 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): _LOGGER, config_entry=entry, name=DOMAIN, - update_interval=SCAN_INTERVAL, + update_interval=self._default_update_interval, ) + self.device = device self.local_connection_configured = local_client is not None - - assert self.config_entry.unique_id - self.device = LaMarzoccoMachine( - model=self.config_entry.data[CONF_MODEL], - serial_number=self.config_entry.unique_id, - name=self.config_entry.data[CONF_NAME], - cloud_client=cloud_client, - local_client=local_client, - bluetooth_client=bluetooth_client, - ) - - self._last_firmware_data_update: float | None = None - self._last_statistics_data_update: float | None = None self._local_client = local_client + async def _async_update_data(self) -> None: + """Do the data update.""" + try: + await self._internal_async_update_data() + except AuthFail as ex: + _LOGGER.debug("Authentication failed", exc_info=True) + raise ConfigEntryAuthFailed( + 
translation_domain=DOMAIN, translation_key="authentication_failed" + ) from ex + except RequestNotSuccessful as ex: + _LOGGER.debug(ex, exc_info=True) + raise UpdateFailed( + translation_domain=DOMAIN, translation_key="api_error" + ) from ex + + @abstractmethod + async def _internal_async_update_data(self) -> None: + """Actual data update logic.""" + + +class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator): + """Class to handle fetching data from the La Marzocco API centrally.""" + async def _async_setup(self) -> None: """Set up the coordinator.""" if self._local_client is not None: @@ -96,41 +114,29 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): ) self.config_entry.async_on_unload(websocket_close) - async def _async_update_data(self) -> None: + async def _internal_async_update_data(self) -> None: """Fetch data from API endpoint.""" - await self._async_handle_request(self.device.get_config) - - if ( - self._last_firmware_data_update is None - or (self._last_firmware_data_update + FIRMWARE_UPDATE_INTERVAL) < time() - ): - await self._async_handle_request(self.device.get_firmware) - self._last_firmware_data_update = time() - - if ( - self._last_statistics_data_update is None - or (self._last_statistics_data_update + STATISTICS_UPDATE_INTERVAL) < time() - ): - await self._async_handle_request(self.device.get_statistics) - self._last_statistics_data_update = time() - + await self.device.get_config() _LOGGER.debug("Current status: %s", str(self.device.config)) - async def _async_handle_request[**_P]( - self, - func: Callable[_P, Coroutine[None, None, None]], - *args: _P.args, - **kwargs: _P.kwargs, - ) -> None: - try: - await func(*args, **kwargs) - except AuthFail as ex: - _LOGGER.debug("Authentication failed", exc_info=True) - raise ConfigEntryAuthFailed( - translation_domain=DOMAIN, translation_key="authentication_failed" - ) from ex - except RequestNotSuccessful as ex: - _LOGGER.debug(ex, exc_info=True) - raise UpdateFailed( - 
translation_domain=DOMAIN, translation_key="api_error" - ) from ex + +class LaMarzoccoFirmwareUpdateCoordinator(LaMarzoccoUpdateCoordinator): + """Coordinator for La Marzocco firmware.""" + + _default_update_interval = FIRMWARE_UPDATE_INTERVAL + + async def _internal_async_update_data(self) -> None: + """Fetch data from API endpoint.""" + await self.device.get_firmware() + _LOGGER.debug("Current firmware: %s", str(self.device.firmware)) + + +class LaMarzoccoStatisticsUpdateCoordinator(LaMarzoccoUpdateCoordinator): + """Coordinator for La Marzocco statistics.""" + + _default_update_interval = STATISTICS_UPDATE_INTERVAL + + async def _internal_async_update_data(self) -> None: + """Fetch data from API endpoint.""" + await self.device.get_statistics() + _LOGGER.debug("Current statistics: %s", str(self.device.statistics)) diff --git a/homeassistant/components/lamarzocco/diagnostics.py b/homeassistant/components/lamarzocco/diagnostics.py index 43ae51ee192..204a8b7142a 100644 --- a/homeassistant/components/lamarzocco/diagnostics.py +++ b/homeassistant/components/lamarzocco/diagnostics.py @@ -31,7 +31,7 @@ async def async_get_config_entry_diagnostics( entry: LaMarzoccoConfigEntry, ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator device = coordinator.device # collect all data sources diagnostics_data = DiagnosticsData( diff --git a/homeassistant/components/lamarzocco/number.py b/homeassistant/components/lamarzocco/number.py index feeb7e4a282..a1389769194 100644 --- a/homeassistant/components/lamarzocco/number.py +++ b/homeassistant/components/lamarzocco/number.py @@ -210,7 +210,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up number entities.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator entities: list[NumberEntity] = [ LaMarzoccoNumberEntity(coordinator, description) for 
description in ENTITIES diff --git a/homeassistant/components/lamarzocco/select.py b/homeassistant/components/lamarzocco/select.py index e6b5f9a3d94..595c157b823 100644 --- a/homeassistant/components/lamarzocco/select.py +++ b/homeassistant/components/lamarzocco/select.py @@ -107,7 +107,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up select entities.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator async_add_entities( LaMarzoccoSelectEntity(coordinator, description) diff --git a/homeassistant/components/lamarzocco/sensor.py b/homeassistant/components/lamarzocco/sensor.py index 6dda6e69a02..8d57c1b8403 100644 --- a/homeassistant/components/lamarzocco/sensor.py +++ b/homeassistant/components/lamarzocco/sensor.py @@ -33,24 +33,6 @@ class LaMarzoccoSensorEntityDescription( ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = ( - LaMarzoccoSensorEntityDescription( - key="drink_stats_coffee", - translation_key="drink_stats_coffee", - native_unit_of_measurement="drinks", - state_class=SensorStateClass.TOTAL_INCREASING, - value_fn=lambda device: device.statistics.drink_stats.get(PhysicalKey.A, 0), - available_fn=lambda device: len(device.statistics.drink_stats) > 0, - entity_category=EntityCategory.DIAGNOSTIC, - ), - LaMarzoccoSensorEntityDescription( - key="drink_stats_flushing", - translation_key="drink_stats_flushing", - native_unit_of_measurement="drinks", - state_class=SensorStateClass.TOTAL_INCREASING, - value_fn=lambda device: device.statistics.total_flushes, - available_fn=lambda device: len(device.statistics.drink_stats) > 0, - entity_category=EntityCategory.DIAGNOSTIC, - ), LaMarzoccoSensorEntityDescription( key="shot_timer", translation_key="shot_timer", @@ -88,6 +70,27 @@ ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = ( ), ) +STATISTIC_ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] 
= ( + LaMarzoccoSensorEntityDescription( + key="drink_stats_coffee", + translation_key="drink_stats_coffee", + native_unit_of_measurement="drinks", + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda device: device.statistics.drink_stats.get(PhysicalKey.A, 0), + available_fn=lambda device: len(device.statistics.drink_stats) > 0, + entity_category=EntityCategory.DIAGNOSTIC, + ), + LaMarzoccoSensorEntityDescription( + key="drink_stats_flushing", + translation_key="drink_stats_flushing", + native_unit_of_measurement="drinks", + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda device: device.statistics.total_flushes, + available_fn=lambda device: len(device.statistics.drink_stats) > 0, + entity_category=EntityCategory.DIAGNOSTIC, + ), +) + async def async_setup_entry( hass: HomeAssistant, @@ -95,14 +98,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up sensor entities.""" - coordinator = entry.runtime_data + config_coordinator = entry.runtime_data.config_coordinator - async_add_entities( - LaMarzoccoSensorEntity(coordinator, description) + entities = [ + LaMarzoccoSensorEntity(config_coordinator, description) for description in ENTITIES - if description.supported_fn(coordinator) + if description.supported_fn(config_coordinator) + ] + + statistics_coordinator = entry.runtime_data.statistics_coordinator + entities.extend( + LaMarzoccoSensorEntity(statistics_coordinator, description) + for description in STATISTIC_ENTITIES + if description.supported_fn(statistics_coordinator) ) + async_add_entities(entities) + class LaMarzoccoSensorEntity(LaMarzoccoEntity, SensorEntity): """Sensor representing espresso machine temperature data.""" diff --git a/homeassistant/components/lamarzocco/switch.py b/homeassistant/components/lamarzocco/switch.py index 263bb5dc6ec..54bd1ac2aed 100644 --- a/homeassistant/components/lamarzocco/switch.py +++ b/homeassistant/components/lamarzocco/switch.py @@ -68,7 +68,7 @@ 
async def async_setup_entry( ) -> None: """Set up switch entities and services.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.config_coordinator entities: list[SwitchEntity] = [] entities.extend( diff --git a/homeassistant/components/lamarzocco/update.py b/homeassistant/components/lamarzocco/update.py index ca182909042..0833ee6e249 100644 --- a/homeassistant/components/lamarzocco/update.py +++ b/homeassistant/components/lamarzocco/update.py @@ -59,7 +59,7 @@ async def async_setup_entry( ) -> None: """Create update entities.""" - coordinator = entry.runtime_data + coordinator = entry.runtime_data.firmware_coordinator async_add_entities( LaMarzoccoUpdateEntity(coordinator, description) for description in ENTITIES diff --git a/tests/components/lamarzocco/conftest.py b/tests/components/lamarzocco/conftest.py index 0bd3fb2a737..997fa73604c 100644 --- a/tests/components/lamarzocco/conftest.py +++ b/tests/components/lamarzocco/conftest.py @@ -143,7 +143,7 @@ def mock_lamarzocco(device_fixture: MachineModel) -> Generator[MagicMock]: with ( patch( - "homeassistant.components.lamarzocco.coordinator.LaMarzoccoMachine", + "homeassistant.components.lamarzocco.LaMarzoccoMachine", autospec=True, ) as lamarzocco_mock, ): diff --git a/tests/components/lamarzocco/test_init.py b/tests/components/lamarzocco/test_init.py index 80c038c4948..446c8780b62 100644 --- a/tests/components/lamarzocco/test_init.py +++ b/tests/components/lamarzocco/test_init.py @@ -174,9 +174,7 @@ async def test_bluetooth_is_set_from_discovery( "homeassistant.components.lamarzocco.async_discovered_service_info", return_value=[service_info], ) as discovery, - patch( - "homeassistant.components.lamarzocco.coordinator.LaMarzoccoMachine" - ) as init_device, + patch("homeassistant.components.lamarzocco.LaMarzoccoMachine") as init_device, ): await async_init_integration(hass, mock_config_entry) discovery.assert_called_once() From e24dc3325905079d515439edf514a52ee7661f67 Mon Sep 17 00:00:00 
2001 From: Paulus Schoutsen Date: Sun, 15 Dec 2024 15:45:50 -0500 Subject: [PATCH 286/677] Conversation: Use [] when we know key exists (#133305) --- homeassistant/components/conversation/http.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/conversation/http.py b/homeassistant/components/conversation/http.py index d9873c5cbce..8134ecb0eee 100644 --- a/homeassistant/components/conversation/http.py +++ b/homeassistant/components/conversation/http.py @@ -24,7 +24,7 @@ from .agent_manager import ( get_agent_manager, ) from .const import DATA_COMPONENT, DATA_DEFAULT_ENTITY -from .default_agent import METADATA_CUSTOM_FILE, METADATA_CUSTOM_SENTENCE, DefaultAgent +from .default_agent import METADATA_CUSTOM_FILE, METADATA_CUSTOM_SENTENCE from .entity import ConversationEntity from .models import ConversationInput @@ -162,8 +162,7 @@ async def websocket_list_sentences( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """List custom registered sentences.""" - agent = hass.data.get(DATA_DEFAULT_ENTITY) - assert isinstance(agent, DefaultAgent) + agent = hass.data[DATA_DEFAULT_ENTITY] sentences = [] for trigger_data in agent.trigger_sentences: @@ -185,8 +184,7 @@ async def websocket_hass_agent_debug( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """Return intents that would be matched by the default agent for a list of sentences.""" - agent = hass.data.get(DATA_DEFAULT_ENTITY) - assert isinstance(agent, DefaultAgent) + agent = hass.data[DATA_DEFAULT_ENTITY] # Return results for each sentence in the same order as the input. 
result_dicts: list[dict[str, Any] | None] = [] From 66dcd38701283e9e04d7eaa8257ad1d94448f6a6 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Mon, 16 Dec 2024 08:10:37 +0100 Subject: [PATCH 287/677] Update docker base image to 2024.12.1 (#133323) --- build.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/build.yaml b/build.yaml index a8755bbbf5c..fafdd876f75 100644 --- a/build.yaml +++ b/build.yaml @@ -1,10 +1,10 @@ image: ghcr.io/home-assistant/{arch}-homeassistant build_from: - aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.11.0 - armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.11.0 - armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.11.0 - amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.11.0 - i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.11.0 + aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.12.1 + armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.12.1 + armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.12.1 + amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.12.1 + i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.12.1 codenotary: signer: notary@home-assistant.io base_image: notary@home-assistant.io From 909eb045cc0098749824d462c2876a50b88b32d5 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 16 Dec 2024 08:27:10 +0100 Subject: [PATCH 288/677] Set default min/max color temperature in abode lights (#133331) --- homeassistant/components/abode/light.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/homeassistant/components/abode/light.py b/homeassistant/components/abode/light.py index 9b21ee4eb74..e2d0a331f0a 100644 --- a/homeassistant/components/abode/light.py +++ b/homeassistant/components/abode/light.py @@ -11,6 +11,8 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, + 
DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ColorMode, LightEntity, ) @@ -40,6 +42,8 @@ class AbodeLight(AbodeDevice, LightEntity): _device: Light _attr_name = None + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN def turn_on(self, **kwargs: Any) -> None: """Turn on the light.""" From 5f2b1bd62282d0d55d1ad1e2c8ed00de30bacb15 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 16 Dec 2024 08:45:59 +0100 Subject: [PATCH 289/677] Set default min/max color temperature in demo lights (#133330) --- homeassistant/components/demo/light.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/homeassistant/components/demo/light.py b/homeassistant/components/demo/light.py index 8bb4e403c3d..ec98a056b3e 100644 --- a/homeassistant/components/demo/light.py +++ b/homeassistant/components/demo/light.py @@ -13,6 +13,8 @@ from homeassistant.components.light import ( ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, ATTR_WHITE, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ColorMode, LightEntity, LightEntityFeature, @@ -100,6 +102,9 @@ class DemoLight(LightEntity): _attr_name = None _attr_should_poll = False + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN + def __init__( self, unique_id: str, From 4566ebbb3dd016b35fb6204fa33601109b11f2cb Mon Sep 17 00:00:00 2001 From: Chris Talkington Date: Mon, 16 Dec 2024 01:51:01 -0600 Subject: [PATCH 290/677] Add reconfigure flow to Roku (#132986) * add reconfigure flow to roku * Update strings.json * aimplify * Apply suggestions from code review Co-authored-by: Josef Zweck * Update test_config_flow.py * Update config_flow.py * Update config_flow.py --------- Co-authored-by: Josef Zweck --- homeassistant/components/roku/config_flow.py | 43 +++++++++++-- homeassistant/components/roku/strings.json | 4 +- tests/components/roku/test_config_flow.py | 66 +++++++++++++++++++- 3 files changed, 103 insertions(+), 10 deletions(-) 
diff --git a/homeassistant/components/roku/config_flow.py b/homeassistant/components/roku/config_flow.py index b92ff819701..bc0092d6953 100644 --- a/homeassistant/components/roku/config_flow.py +++ b/homeassistant/components/roku/config_flow.py @@ -10,7 +10,12 @@ from rokuecp import Roku, RokuError import voluptuous as vol from homeassistant.components import ssdp, zeroconf -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow +from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -53,20 +58,38 @@ class RokuConfigFlow(ConfigFlow, domain=DOMAIN): self.discovery_info = {} @callback - def _show_form(self, errors: dict[str, Any] | None = None) -> ConfigFlowResult: + def _show_form( + self, + user_input: dict[str, Any] | None, + errors: dict[str, Any] | None = None, + ) -> ConfigFlowResult: """Show the form to the user.""" + suggested_values = user_input + if suggested_values is None and self.source == SOURCE_RECONFIGURE: + suggested_values = { + CONF_HOST: self._get_reconfigure_entry().data[CONF_HOST] + } + return self.async_show_form( step_id="user", - data_schema=DATA_SCHEMA, + data_schema=self.add_suggested_values_to_schema( + DATA_SCHEMA, suggested_values + ), errors=errors or {}, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the integration.""" + return await self.async_step_user(user_input) + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" if not user_input: - return self._show_form() + return self._show_form(user_input) errors = {} @@ -75,13 +98,21 @@ class RokuConfigFlow(ConfigFlow, domain=DOMAIN): 
except RokuError: _LOGGER.debug("Roku Error", exc_info=True) errors["base"] = ERROR_CANNOT_CONNECT - return self._show_form(errors) + return self._show_form(user_input, errors) except Exception: _LOGGER.exception("Unknown error trying to connect") return self.async_abort(reason=ERROR_UNKNOWN) await self.async_set_unique_id(info["serial_number"]) - self._abort_if_unique_id_configured(updates={CONF_HOST: user_input[CONF_HOST]}) + + if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch(reason="wrong_device") + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + data_updates={CONF_HOST: user_input[CONF_HOST]}, + ) + + self._abort_if_unique_id_configured() return self.async_create_entry(title=info["title"], data=user_input) diff --git a/homeassistant/components/roku/strings.json b/homeassistant/components/roku/strings.json index 9d657be6d61..bd47585db1b 100644 --- a/homeassistant/components/roku/strings.json +++ b/homeassistant/components/roku/strings.json @@ -21,7 +21,9 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", - "unknown": "[%key:common::config_flow::error::unknown%]" + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "unknown": "[%key:common::config_flow::error::unknown%]", + "wrong_device": "This Roku device does not match the existing device id. Please make sure you entered the correct host information." 
} }, "options": { diff --git a/tests/components/roku/test_config_flow.py b/tests/components/roku/test_config_flow.py index 7144c77cad9..57ddf5d51a6 100644 --- a/tests/components/roku/test_config_flow.py +++ b/tests/components/roku/test_config_flow.py @@ -1,13 +1,18 @@ """Test the Roku config flow.""" import dataclasses -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock import pytest -from rokuecp import RokuConnectionError +from rokuecp import Device as RokuDevice, RokuConnectionError from homeassistant.components.roku.const import CONF_PLAY_MEDIA_APP_ID, DOMAIN -from homeassistant.config_entries import SOURCE_HOMEKIT, SOURCE_SSDP, SOURCE_USER +from homeassistant.config_entries import ( + SOURCE_HOMEKIT, + SOURCE_SSDP, + SOURCE_USER, + ConfigFlowResult, +) from homeassistant.const import CONF_HOST, CONF_NAME, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -23,6 +28,8 @@ from . import ( from tests.common import MockConfigEntry +RECONFIGURE_HOST = "192.168.1.190" + async def test_duplicate_error( hass: HomeAssistant, @@ -276,3 +283,56 @@ async def test_options_flow( assert result2.get("data") == { CONF_PLAY_MEDIA_APP_ID: "782875", } + + +async def _start_reconfigure_flow( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> ConfigFlowResult: + """Initialize a reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + + reconfigure_result = await mock_config_entry.start_reconfigure_flow(hass) + + assert reconfigure_result["type"] is FlowResultType.FORM + assert reconfigure_result["step_id"] == "user" + + return await hass.config_entries.flow.async_configure( + reconfigure_result["flow_id"], + {CONF_HOST: RECONFIGURE_HOST}, + ) + + +async def test_reconfigure_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, + mock_roku_config_flow: MagicMock, +) -> None: + """Test reconfigure flow.""" + result = await 
_start_reconfigure_flow(hass, mock_config_entry) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + entry = hass.config_entries.async_get_entry(mock_config_entry.entry_id) + assert entry + assert entry.data == { + CONF_HOST: RECONFIGURE_HOST, + } + + +async def test_reconfigure_unique_id_mismatch( + hass: HomeAssistant, + mock_device: RokuDevice, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, + mock_roku_config_flow: MagicMock, +) -> None: + """Ensure reconfigure flow aborts when the device changes.""" + mock_device.info.serial_number = "RECONFIG" + + result = await _start_reconfigure_flow(hass, mock_config_entry) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "wrong_device" From 22d03afb9b5c5142d4ac944b4903a1e6d13c9c82 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 16 Dec 2024 09:08:37 +0100 Subject: [PATCH 291/677] Set default min/max color temperature in wemo lights (#133338) --- homeassistant/components/wemo/light.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/homeassistant/components/wemo/light.py b/homeassistant/components/wemo/light.py index b39f4829605..6068cd3ff0b 100644 --- a/homeassistant/components/wemo/light.py +++ b/homeassistant/components/wemo/light.py @@ -11,6 +11,8 @@ from homeassistant.components.light import ( ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_TRANSITION, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ColorMode, LightEntity, LightEntityFeature, @@ -77,6 +79,8 @@ def async_setup_bridge( class WemoLight(WemoEntity, LightEntity): """Representation of a WeMo light.""" + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN _attr_supported_features = LightEntityFeature.TRANSITION def __init__(self, coordinator: DeviceCoordinator, light: BridgeLight) -> None: From 06f6869da5dfaf0fcfeda28231ac2b7ea64297b1 Mon Sep 17 00:00:00 2001 From: 
Erik Montnemery Date: Mon, 16 Dec 2024 09:47:49 +0100 Subject: [PATCH 292/677] Avoid string manipulations in hassio backup reader/writer (#133339) --- homeassistant/components/hassio/backup.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py index 53f3a226a09..e544a56a3c8 100644 --- a/homeassistant/components/hassio/backup.py +++ b/homeassistant/components/hassio/backup.py @@ -175,7 +175,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): hassio_agents: list[SupervisorBackupAgent] = [ cast(SupervisorBackupAgent, manager.backup_agents[agent_id]) for agent_id in agent_ids - if agent_id.startswith(DOMAIN) + if manager.backup_agents[agent_id].domain == DOMAIN ] locations = {agent.location for agent in hassio_agents} @@ -254,7 +254,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): hassio_agents: list[SupervisorBackupAgent] = [ cast(SupervisorBackupAgent, manager.backup_agents[agent_id]) for agent_id in agent_ids - if agent_id.startswith(DOMAIN) + if manager.backup_agents[agent_id].domain == DOMAIN ] locations = {agent.location for agent in hassio_agents} @@ -305,7 +305,8 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): else None ) - if not agent_id.startswith(DOMAIN): + manager = self._hass.data[DATA_MANAGER] + if manager.backup_agents[agent_id].domain != DOMAIN: # Download the backup to the supervisor. Supervisor will clean up the backup # two days after the restore is done. 
await self.async_receive_backup( From f2674f32623492d0b8a75d9293b456dc801997fb Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 16 Dec 2024 09:49:18 +0100 Subject: [PATCH 293/677] Set default min/max color temperature in deconz lights (#133333) --- homeassistant/components/deconz/light.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/homeassistant/components/deconz/light.py b/homeassistant/components/deconz/light.py index acfbff98297..b1df32efc31 100644 --- a/homeassistant/components/deconz/light.py +++ b/homeassistant/components/deconz/light.py @@ -18,6 +18,8 @@ from homeassistant.components.light import ( ATTR_HS_COLOR, ATTR_TRANSITION, ATTR_XY_COLOR, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, DOMAIN as LIGHT_DOMAIN, EFFECT_COLORLOOP, FLASH_LONG, @@ -191,6 +193,8 @@ class DeconzBaseLight[_LightDeviceT: Group | Light]( TYPE = LIGHT_DOMAIN _attr_color_mode = ColorMode.UNKNOWN + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN def __init__(self, device: _LightDeviceT, hub: DeconzHub) -> None: """Set up light.""" From d78a24ba33b9ac8918ebe000849997a5fd77aef7 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Mon, 16 Dec 2024 09:54:01 +0100 Subject: [PATCH 294/677] Use `ConfigEntry.runtime_data` in Twitch (#133337) * Use `ConfigEntry.runtime_data` in Twitch * Process code review * Process code review --- homeassistant/components/twitch/__init__.py | 14 ++++++-------- homeassistant/components/twitch/coordinator.py | 11 +++++++++-- homeassistant/components/twitch/sensor.py | 9 +++------ tests/components/twitch/__init__.py | 2 +- tests/components/twitch/test_sensor.py | 2 +- 5 files changed, 20 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/twitch/__init__.py b/homeassistant/components/twitch/__init__.py index 6979a016447..22a1782f594 100644 --- a/homeassistant/components/twitch/__init__.py +++ b/homeassistant/components/twitch/__init__.py @@ 
-7,7 +7,6 @@ from typing import cast from aiohttp.client_exceptions import ClientError, ClientResponseError from twitchAPI.twitch import Twitch -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady @@ -17,11 +16,11 @@ from homeassistant.helpers.config_entry_oauth2_flow import ( async_get_config_entry_implementation, ) -from .const import DOMAIN, OAUTH_SCOPES, PLATFORMS -from .coordinator import TwitchCoordinator +from .const import OAUTH_SCOPES, PLATFORMS +from .coordinator import TwitchConfigEntry, TwitchCoordinator -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: TwitchConfigEntry) -> bool: """Set up Twitch from a config entry.""" implementation = cast( LocalOAuth2Implementation, @@ -47,18 +46,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: client.auto_refresh_auth = False await client.set_user_authentication(access_token, scope=OAUTH_SCOPES) - coordinator = TwitchCoordinator(hass, client, session) - + coordinator = TwitchCoordinator(hass, client, session, entry) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: TwitchConfigEntry) -> bool: """Unload Twitch config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/twitch/coordinator.py b/homeassistant/components/twitch/coordinator.py index c34eeaa5325..c61e80bd2b8 100644 --- a/homeassistant/components/twitch/coordinator.py 
+++ b/homeassistant/components/twitch/coordinator.py @@ -15,6 +15,8 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda from .const import CONF_CHANNELS, DOMAIN, LOGGER, OAUTH_SCOPES +type TwitchConfigEntry = ConfigEntry[TwitchCoordinator] + def chunk_list(lst: list, chunk_size: int) -> list[list]: """Split a list into chunks of chunk_size.""" @@ -44,12 +46,16 @@ class TwitchUpdate: class TwitchCoordinator(DataUpdateCoordinator[dict[str, TwitchUpdate]]): """Class to manage fetching Twitch data.""" - config_entry: ConfigEntry + config_entry: TwitchConfigEntry users: list[TwitchUser] current_user: TwitchUser def __init__( - self, hass: HomeAssistant, twitch: Twitch, session: OAuth2Session + self, + hass: HomeAssistant, + twitch: Twitch, + session: OAuth2Session, + entry: TwitchConfigEntry, ) -> None: """Initialize the coordinator.""" self.twitch = twitch @@ -58,6 +64,7 @@ class TwitchCoordinator(DataUpdateCoordinator[dict[str, TwitchUpdate]]): LOGGER, name=DOMAIN, update_interval=timedelta(minutes=5), + config_entry=entry, ) self.session = session diff --git a/homeassistant/components/twitch/sensor.py b/homeassistant/components/twitch/sensor.py index f78d33ea461..b407eae0319 100644 --- a/homeassistant/components/twitch/sensor.py +++ b/homeassistant/components/twitch/sensor.py @@ -5,15 +5,12 @@ from __future__ import annotations from typing import Any from homeassistant.components.sensor import SensorDeviceClass, SensorEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . 
import TwitchCoordinator -from .const import DOMAIN -from .coordinator import TwitchUpdate +from .coordinator import TwitchConfigEntry, TwitchCoordinator, TwitchUpdate ATTR_GAME = "game" ATTR_TITLE = "title" @@ -34,11 +31,11 @@ PARALLEL_UPDATES = 1 async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: TwitchConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Initialize entries.""" - coordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( TwitchSensor(coordinator, channel_id) for channel_id in coordinator.data diff --git a/tests/components/twitch/__init__.py b/tests/components/twitch/__init__.py index 2d70aaf9649..1887861f6e5 100644 --- a/tests/components/twitch/__init__.py +++ b/tests/components/twitch/__init__.py @@ -5,7 +5,7 @@ from typing import Any, Generic, TypeVar from twitchAPI.object.base import TwitchObject -from homeassistant.components.twitch import DOMAIN +from homeassistant.components.twitch.const import DOMAIN from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_json_array_fixture diff --git a/tests/components/twitch/test_sensor.py b/tests/components/twitch/test_sensor.py index 613c0919c49..c8cc009f3e1 100644 --- a/tests/components/twitch/test_sensor.py +++ b/tests/components/twitch/test_sensor.py @@ -7,7 +7,7 @@ from dateutil.tz import tzutc from twitchAPI.object.api import FollowedChannel, Stream, UserSubscription from twitchAPI.type import TwitchResourceNotFound -from homeassistant.components.twitch import DOMAIN +from homeassistant.components.twitch.const import DOMAIN from homeassistant.core import HomeAssistant from . 
import TwitchIterObject, get_generator_from_data, setup_integration From 9667a120309f566a85df8278ccd0da0bee1b926a Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 16 Dec 2024 10:32:57 +0100 Subject: [PATCH 295/677] Set default min/max color temperature in matter lights (#133340) --- homeassistant/components/matter/light.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/homeassistant/components/matter/light.py b/homeassistant/components/matter/light.py index 153e154e64e..c9d5c688f69 100644 --- a/homeassistant/components/matter/light.py +++ b/homeassistant/components/matter/light.py @@ -13,6 +13,8 @@ from homeassistant.components.light import ( ATTR_HS_COLOR, ATTR_TRANSITION, ATTR_XY_COLOR, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ColorMode, LightEntity, LightEntityDescription, @@ -91,6 +93,8 @@ class MatterLight(MatterEntity, LightEntity): _supports_color_temperature = False _transitions_disabled = False _platform_translation_key = "light" + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN async def _set_xy_color( self, xy_color: tuple[float, float], transition: float = 0.0 From d062171be3e5dfdaa310b5e4f4f16a72a3e265d6 Mon Sep 17 00:00:00 2001 From: jb101010-2 <168106462+jb101010-2@users.noreply.github.com> Date: Mon, 16 Dec 2024 12:19:21 +0100 Subject: [PATCH 296/677] Suez_water: mark reached bronze scale level (#133352) --- homeassistant/components/suez_water/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/suez_water/manifest.json b/homeassistant/components/suez_water/manifest.json index 7e720a86afd..f39411e8afa 100644 --- a/homeassistant/components/suez_water/manifest.json +++ b/homeassistant/components/suez_water/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/suez_water", "iot_class": "cloud_polling", "loggers": ["pysuez", "regex"], + "quality_scale": "bronze", "requirements": 
["pysuezV2==1.3.5"] } From 4b3893eadf2488d5c7507a03138e8b2bb91cfdfe Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 16 Dec 2024 12:26:29 +0100 Subject: [PATCH 297/677] Set default min/max color temperature in homekit_controller lights (#133334) --- .../components/homekit_controller/light.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/homekit_controller/light.py b/homeassistant/components/homekit_controller/light.py index d8c48d81333..26f10768aa0 100644 --- a/homeassistant/components/homekit_controller/light.py +++ b/homeassistant/components/homekit_controller/light.py @@ -12,6 +12,8 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ColorMode, LightEntity, ) @@ -53,6 +55,9 @@ async def async_setup_entry( class HomeKitLight(HomeKitEntity, LightEntity): """Representation of a Homekit light.""" + _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN + _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN + @callback def _async_reconfigure(self) -> None: """Reconfigure entity.""" @@ -98,24 +103,24 @@ class HomeKitLight(HomeKitEntity, LightEntity): def max_color_temp_kelvin(self) -> int: """Return the coldest color_temp_kelvin that this light supports.""" if not self.service.has(CharacteristicsTypes.COLOR_TEMPERATURE): - return super().max_color_temp_kelvin + return DEFAULT_MAX_KELVIN min_value_mireds = self.service[CharacteristicsTypes.COLOR_TEMPERATURE].minValue return ( color_util.color_temperature_mired_to_kelvin(min_value_mireds) if min_value_mireds - else super().max_color_temp_kelvin + else DEFAULT_MAX_KELVIN ) @cached_property def min_color_temp_kelvin(self) -> int: """Return the warmest color_temp_kelvin that this light supports.""" if not self.service.has(CharacteristicsTypes.COLOR_TEMPERATURE): - return super().min_color_temp_kelvin + return DEFAULT_MIN_KELVIN 
max_value_mireds = self.service[CharacteristicsTypes.COLOR_TEMPERATURE].maxValue return ( color_util.color_temperature_mired_to_kelvin(max_value_mireds) if max_value_mireds - else super().min_color_temp_kelvin + else DEFAULT_MIN_KELVIN ) @property From cd2cc1d99fa362e8d2f67840e5224f3ceca15723 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 16 Dec 2024 13:10:15 +0100 Subject: [PATCH 298/677] Reduce false-positives in test-before-setup IQS check (#133349) --- .../test_before_setup.py | 41 ++++++++++++------- 1 file changed, 27 insertions(+), 14 deletions(-) diff --git a/script/hassfest/quality_scale_validation/test_before_setup.py b/script/hassfest/quality_scale_validation/test_before_setup.py index db737c99e37..5f21a9d2458 100644 --- a/script/hassfest/quality_scale_validation/test_before_setup.py +++ b/script/hassfest/quality_scale_validation/test_before_setup.py @@ -15,13 +15,31 @@ _VALID_EXCEPTIONS = { } -def _raises_exception(async_setup_entry_function: ast.AsyncFunctionDef) -> bool: - """Check that a valid exception is raised within `async_setup_entry`.""" - for node in ast.walk(async_setup_entry_function): - if isinstance(node, ast.Raise): - if isinstance(node.exc, ast.Name) and node.exc.id in _VALID_EXCEPTIONS: - return True - if isinstance(node.exc, ast.Call) and node.exc.func.id in _VALID_EXCEPTIONS: +def _get_exception_name(expression: ast.expr) -> str: + """Get the name of the exception being raised.""" + if isinstance(expression, ast.Name): + return expression.id + + if isinstance(expression, ast.Call): + return _get_exception_name(expression.func) + + if isinstance(expression, ast.Attribute): + return _get_exception_name(expression.value) + + raise AssertionError( + f"Raise is neither Attribute nor Call nor Name: {type(expression)}" + ) + + +def _raises_exception(integration: Integration) -> bool: + """Check that a valid exception is raised.""" + for module_file in integration.path.rglob("*.py"): + module = 
ast_parse_module(module_file) + for node in ast.walk(module): + if ( + isinstance(node, ast.Raise) + and _get_exception_name(node.exc) in _VALID_EXCEPTIONS + ): return True return False @@ -59,11 +77,6 @@ def validate( if not (async_setup_entry := _get_setup_entry_function(init)): return [f"Could not find `async_setup_entry` in {init_file}"] - if not ( - _raises_exception(async_setup_entry) or _calls_first_refresh(async_setup_entry) - ): - return [ - f"Integration does not raise one of {_VALID_EXCEPTIONS} " - f"in async_setup_entry ({init_file})" - ] + if not (_calls_first_refresh(async_setup_entry) or _raises_exception(integration)): + return [f"Integration does not raise one of {_VALID_EXCEPTIONS}"] return None From 739832691e16c078eb6f96ce16c2f05f9df1bf46 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Mon, 16 Dec 2024 12:14:01 +0000 Subject: [PATCH 299/677] Add Idasen Desk quality scale record (#132368) --- .../components/idasen_desk/quality_scale.yaml | 108 ++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 108 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/idasen_desk/quality_scale.yaml diff --git a/homeassistant/components/idasen_desk/quality_scale.yaml b/homeassistant/components/idasen_desk/quality_scale.yaml new file mode 100644 index 00000000000..28381f98a3e --- /dev/null +++ b/homeassistant/components/idasen_desk/quality_scale.yaml @@ -0,0 +1,108 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: + status: exempt + comment: | + This integration does not use polling. + brands: done + common-modules: + status: todo + comment: | + The cover and sensor entities could move common initialization to a base entity class. 
+ config-flow-test-coverage: + status: todo + comment: | + - use mock_desk_api + - merge test_user_step_auth_failed, test_user_step_cannot_connect and test_user_step_unknown_exception. + config-flow: + status: todo + comment: | + Missing data description for user step. + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: todo + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: todo + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: todo + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + This integration does not provide configuration parameters. + docs-installation-parameters: + status: exempt + comment: | + This integration does not provide installation parameters. + entity-unavailable: done + integration-owner: done + log-when-unavailable: todo + parallel-updates: todo + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. + test-coverage: + status: todo + comment: | + - remove the await hass.async_block_till_done() after service calls with blocking=True + - use constants (like SERVICE_PRESS and ATTR_ENTITY_ID) in the tests calling services + - rename test_buttons.py -> test_button.py + - rename test_sensors.py -> test_sensor.py + # Gold + devices: done + diagnostics: todo + discovery-update-info: + status: exempt + comment: | + This integration uses Bluetooth and addresses don't change. 
+ discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: + status: exempt + comment: | + This integration doesn't have any cases where a reconfiguration is needed. + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + stale-devices: + status: exempt + comment: | + This integration has a fixed single device. + + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: | + This integration doesn't use websession. + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 23721d31fec..e0992914626 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -516,7 +516,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "iaqualink", "ibeacon", "icloud", - "idasen_desk", "idteck_prox", "ifttt", "iglo", From 34911a78bd93a3c375f1d2afcbb80eea0de1f3b1 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Mon, 16 Dec 2024 13:17:38 +0100 Subject: [PATCH 300/677] Add Habitica quality scale record (#131429) Co-authored-by: Franck Nijhof Co-authored-by: Joost Lekkerkerker --- .../components/habitica/quality_scale.yaml | 84 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 84 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/habitica/quality_scale.yaml diff --git a/homeassistant/components/habitica/quality_scale.yaml b/homeassistant/components/habitica/quality_scale.yaml new file mode 100644 index 00000000000..cf54672bfed --- /dev/null +++ 
b/homeassistant/components/habitica/quality_scale.yaml @@ -0,0 +1,84 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: + status: todo + comment: test already_configured, tests should finish with create_entry or abort, assert unique_id + config-flow: done + dependency-transparency: todo + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: No events are registered by the integration. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: There is no options flow. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: todo + test-coverage: todo + + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: Integration represents a service + discovery: + status: exempt + comment: Integration represents a service + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: + status: exempt + comment: No supportable devices. + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: done + dynamic-devices: + status: exempt + comment: | + Integration is a service, no devices that could be added at runtime. + Button entities for casting skills are created/removed dynamically if unlocked or on class change + entity-category: + status: done + comment: Default categories are appropriate for currently available entities. 
+ entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: + status: todo + comment: translations for UpdateFailed missing + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: done + comment: Used to inform of deprecated entities and actions. + stale-devices: + status: done + comment: Not applicable. Only one device per config entry. Removed together with the config entry. + + # Platinum + async-dependency: todo + inject-websession: done + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index e0992914626..604ce5e51ea 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -473,7 +473,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "gstreamer", "gtfs", "guardian", - "habitica", "harman_kardon_avr", "harmony", "hassio", From 836fd94a5633e7dd3a9879e6293e9878078a9a89 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 16 Dec 2024 13:31:13 +0100 Subject: [PATCH 301/677] Record current IQS state for LaMetric (#133040) --- .../components/lametric/quality_scale.yaml | 75 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 75 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/lametric/quality_scale.yaml diff --git a/homeassistant/components/lametric/quality_scale.yaml b/homeassistant/components/lametric/quality_scale.yaml new file mode 100644 index 00000000000..a8982bb938b --- /dev/null +++ b/homeassistant/components/lametric/quality_scale.yaml @@ -0,0 +1,75 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + Entities of this 
integration does not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: todo + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: todo + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: done + test-coverage: done + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: + status: todo + comment: | + Device are documented, but some are missing. For example, the their pro + strip is supported as well. + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration connects to a single device. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: todo + exception-translations: todo + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration does not raise any repairable issues. + stale-devices: + status: exempt + comment: | + This integration connects to a single device. 
+ + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 604ce5e51ea..43b4adc90e9 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -576,7 +576,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "kwb", "lacrosse", "lacrosse_view", - "lametric", "landisgyr_heat_meter", "lannouncer", "lastfm", From cc27c95bada7b7e8c0174b9027e9f0f324a87adc Mon Sep 17 00:00:00 2001 From: Guido Schmitz Date: Mon, 16 Dec 2024 13:35:55 +0100 Subject: [PATCH 302/677] Use unique_id in devolo Home Network tests (#133147) --- tests/components/devolo_home_network/__init__.py | 9 +++++++-- .../snapshots/test_diagnostics.ambr | 2 +- .../components/devolo_home_network/test_config_flow.py | 10 +++------- 3 files changed, 11 insertions(+), 10 deletions(-) diff --git a/tests/components/devolo_home_network/__init__.py b/tests/components/devolo_home_network/__init__.py index 05ccbca0c56..f6d1c13299a 100644 --- a/tests/components/devolo_home_network/__init__.py +++ b/tests/components/devolo_home_network/__init__.py @@ -4,7 +4,7 @@ from homeassistant.components.devolo_home_network.const import DOMAIN from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD from homeassistant.core import HomeAssistant -from .const import IP +from .const import DISCOVERY_INFO, IP from tests.common import MockConfigEntry @@ -15,7 +15,12 @@ def configure_integration(hass: HomeAssistant) -> MockConfigEntry: CONF_IP_ADDRESS: IP, CONF_PASSWORD: "test", } - entry = MockConfigEntry(domain=DOMAIN, data=config, entry_id="123456") + entry = MockConfigEntry( + domain=DOMAIN, + data=config, + entry_id="123456", + unique_id=DISCOVERY_INFO.properties["SN"], + ) entry.add_to_hass(hass) return entry diff --git a/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr b/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr index 8fe6c7c2293..1288b7f3ef6 
100644 --- a/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr +++ b/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr @@ -35,7 +35,7 @@ 'subentries': list([ ]), 'title': 'Mock Title', - 'unique_id': None, + 'unique_id': '1234567890', 'version': 1, }), }) diff --git a/tests/components/devolo_home_network/test_config_flow.py b/tests/components/devolo_home_network/test_config_flow.py index 28e9059d588..92163b5cb95 100644 --- a/tests/components/devolo_home_network/test_config_flow.py +++ b/tests/components/devolo_home_network/test_config_flow.py @@ -29,8 +29,6 @@ from .const import ( ) from .mock import MockDevice -from tests.common import MockConfigEntry - async def test_form(hass: HomeAssistant, info: dict[str, Any]) -> None: """Test we get the form.""" @@ -125,6 +123,8 @@ async def test_zeroconf(hass: HomeAssistant) -> None: CONF_IP_ADDRESS: IP, CONF_PASSWORD: "", } + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["result"].unique_id == "1234567890" async def test_abort_zeroconf_wrong_device(hass: HomeAssistant) -> None: @@ -141,11 +141,7 @@ async def test_abort_zeroconf_wrong_device(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("info") async def test_abort_if_configured(hass: HomeAssistant) -> None: """Test we abort config flow if already configured.""" - serial_number = DISCOVERY_INFO.properties["SN"] - entry = MockConfigEntry( - domain=DOMAIN, unique_id=serial_number, data={CONF_IP_ADDRESS: IP} - ) - entry.add_to_hass(hass) + entry = configure_integration(hass) # Abort on concurrent user flow result = await hass.config_entries.flow.async_init( From 0a0f4827020e88a4804a23566d1b6ca45c6811d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Mon, 16 Dec 2024 13:39:46 +0100 Subject: [PATCH 303/677] Update myuplink quality scale (#133083) Updated documentation --- homeassistant/components/myuplink/quality_scale.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) 
diff --git a/homeassistant/components/myuplink/quality_scale.yaml b/homeassistant/components/myuplink/quality_scale.yaml index ef64ce757f5..dbe771f7eb2 100644 --- a/homeassistant/components/myuplink/quality_scale.yaml +++ b/homeassistant/components/myuplink/quality_scale.yaml @@ -61,12 +61,12 @@ rules: comment: | Not possible to discover these devices. docs-data-update: done - docs-examples: todo + docs-examples: done docs-known-limitations: done - docs-supported-devices: todo + docs-supported-devices: done docs-supported-functions: todo docs-troubleshooting: done - docs-use-cases: todo + docs-use-cases: done dynamic-devices: todo entity-category: done entity-device-class: done From 38fdfba1693849792b6f75b06c6952c513a58f45 Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Mon, 16 Dec 2024 13:56:17 +0100 Subject: [PATCH 304/677] Velbus finish config-flow-test-coverage (#133149) Co-authored-by: Joost Lekkerkerker --- homeassistant/components/velbus/quality_scale.yaml | 5 +---- tests/components/velbus/test_config_flow.py | 8 +++++++- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/velbus/quality_scale.yaml b/homeassistant/components/velbus/quality_scale.yaml index 37e55fee19c..9a48e84da93 100644 --- a/homeassistant/components/velbus/quality_scale.yaml +++ b/homeassistant/components/velbus/quality_scale.yaml @@ -7,10 +7,7 @@ rules: This integration does not poll. 
brands: done common-modules: done - config-flow-test-coverage: - status: todo - comment: | - Split test_flow_usb from the test that tests already_configured, test_flow_usb should also assert the unique_id of the entry + config-flow-test-coverage: done config-flow: status: todo comment: | diff --git a/tests/components/velbus/test_config_flow.py b/tests/components/velbus/test_config_flow.py index 432fcea10db..5e81a3f8a36 100644 --- a/tests/components/velbus/test_config_flow.py +++ b/tests/components/velbus/test_config_flow.py @@ -156,12 +156,18 @@ async def test_flow_usb(hass: HomeAssistant) -> None: user_input={}, ) assert result + assert result["result"].unique_id == "0B1B:10CF_1234_Velleman_Velbus VMB1USB" assert result.get("type") is FlowResultType.CREATE_ENTRY - # test an already configured discovery + +@pytest.mark.usefixtures("controller") +@patch("serial.tools.list_ports.comports", MagicMock(return_value=[com_port()])) +async def test_flow_usb_if_already_setup(hass: HomeAssistant) -> None: + """Test we abort if Velbus USB discovbery aborts in case it is already setup.""" entry = MockConfigEntry( domain=DOMAIN, data={CONF_PORT: PORT_SERIAL}, + unique_id="0B1B:10CF_1234_Velleman_Velbus VMB1USB", ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( From a953abf5c3ea000f52f934d711dfe47650645b95 Mon Sep 17 00:00:00 2001 From: Assaf Inbal Date: Mon, 16 Dec 2024 15:00:06 +0200 Subject: [PATCH 305/677] Add reauth flow to Ituran (#132755) --- .../components/ituran/config_flow.py | 36 ++++++++++++++-- .../components/ituran/coordinator.py | 4 +- .../components/ituran/quality_scale.yaml | 2 +- homeassistant/components/ituran/strings.json | 11 +++-- tests/components/ituran/test_config_flow.py | 43 +++++++++++++++++++ 5 files changed, 86 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/ituran/config_flow.py b/homeassistant/components/ituran/config_flow.py index 48e898a9d0a..9709e471503 100644 --- 
a/homeassistant/components/ituran/config_flow.py +++ b/homeassistant/components/ituran/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Mapping import logging from typing import Any @@ -9,7 +10,7 @@ from pyituran import Ituran from pyituran.exceptions import IturanApiError, IturanAuthError import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from .const import ( CONF_ID_OR_PASSPORT, @@ -43,11 +44,12 @@ class IturanConfigFlow(ConfigFlow, domain=DOMAIN): async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Handle the inial step.""" + """Handle the initial step.""" errors: dict[str, str] = {} if user_input is not None: await self.async_set_unique_id(user_input[CONF_ID_OR_PASSPORT]) - self._abort_if_unique_id_configured() + if self.source != SOURCE_REAUTH: + self._abort_if_unique_id_configured() ituran = Ituran( user_input[CONF_ID_OR_PASSPORT], @@ -81,7 +83,7 @@ class IturanConfigFlow(ConfigFlow, domain=DOMAIN): async def async_step_otp( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Handle the inial step.""" + """Handle the OTP step.""" errors: dict[str, str] = {} if user_input is not None: ituran = Ituran( @@ -99,6 +101,10 @@ class IturanConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: + if self.source == SOURCE_REAUTH: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=self._user_info + ) return self.async_create_entry( title=f"Ituran {self._user_info[CONF_ID_OR_PASSPORT]}", data=self._user_info, @@ -107,3 +113,25 @@ class IturanConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="otp", data_schema=STEP_OTP_DATA_SCHEMA, errors=errors ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] 
+ ) -> ConfigFlowResult: + """Handle configuration by re-auth.""" + self._user_info = dict(entry_data) + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reauth confirmation message.""" + if user_input is not None: + return await self.async_step_user(self._user_info) + + return self.async_show_form( + step_id="reauth_confirm", + data_schema=vol.Schema({}), + description_placeholders={ + "phone_number": self._user_info[CONF_PHONE_NUMBER] + }, + ) diff --git a/homeassistant/components/ituran/coordinator.py b/homeassistant/components/ituran/coordinator.py index 93d07b71267..cd0949eb4c2 100644 --- a/homeassistant/components/ituran/coordinator.py +++ b/homeassistant/components/ituran/coordinator.py @@ -7,7 +7,7 @@ from pyituran.exceptions import IturanApiError, IturanAuthError from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError +from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import device_registry as dr from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -54,7 +54,7 @@ class IturanDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Vehicle]]): translation_domain=DOMAIN, translation_key="api_error" ) from e except IturanAuthError as e: - raise ConfigEntryError( + raise ConfigEntryAuthFailed( translation_domain=DOMAIN, translation_key="auth_error" ) from e diff --git a/homeassistant/components/ituran/quality_scale.yaml b/homeassistant/components/ituran/quality_scale.yaml index 71f82aa1971..71d0d9698da 100644 --- a/homeassistant/components/ituran/quality_scale.yaml +++ b/homeassistant/components/ituran/quality_scale.yaml @@ -35,7 +35,7 @@ rules: status: exempt comment: | This integration does not provide additional actions. 
- reauthentication-flow: todo + reauthentication-flow: done parallel-updates: status: exempt comment: | diff --git a/homeassistant/components/ituran/strings.json b/homeassistant/components/ituran/strings.json index e9f785289b8..212dbd1b86a 100644 --- a/homeassistant/components/ituran/strings.json +++ b/homeassistant/components/ituran/strings.json @@ -7,7 +7,7 @@ "phone_number": "Mobile phone number" }, "data_description": { - "id_or_passport": "The goverment ID or passport number provided when registering with Ituran.", + "id_or_passport": "The government ID or passport number provided when registering with Ituran.", "phone_number": "The mobile phone number provided when registering with Ituran. A one-time password will be sent to this mobile number." } }, @@ -18,6 +18,10 @@ "data_description": { "otp": "A one-time-password sent as a text message to the mobile phone number provided before." } + }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "A new one-time password will be sent to {phone_number}." } }, "error": { @@ -27,15 +31,16 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" } }, "exceptions": { "api_error": { - "message": "An error occured while communicating with the Ituran service." + "message": "An error occurred while communicating with the Ituran service." }, "auth_error": { - "message": "Failed authenticating with the Ituran service, please remove and re-add integration." + "message": "Failed authenticating with the Ituran service, please reauthenticate the integration." 
} } } diff --git a/tests/components/ituran/test_config_flow.py b/tests/components/ituran/test_config_flow.py index 0e0f6f63b9a..19253103ad7 100644 --- a/tests/components/ituran/test_config_flow.py +++ b/tests/components/ituran/test_config_flow.py @@ -16,8 +16,11 @@ from homeassistant.config_entries import SOURCE_USER, ConfigFlowResult from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from . import setup_integration from .const import MOCK_CONFIG_DATA +from tests.common import MockConfigEntry + async def __do_successful_user_step( hass: HomeAssistant, result: ConfigFlowResult, mock_ituran: AsyncMock @@ -209,3 +212,43 @@ async def test_already_authenticated( assert result["data"][CONF_PHONE_NUMBER] == MOCK_CONFIG_DATA[CONF_PHONE_NUMBER] assert result["data"][CONF_MOBILE_ID] == MOCK_CONFIG_DATA[CONF_MOBILE_ID] assert result["result"].unique_id == MOCK_CONFIG_DATA[CONF_ID_OR_PASSPORT] + + +async def test_reauth( + hass: HomeAssistant, + mock_ituran: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reauthenticating.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await __do_successful_user_step(hass, result, mock_ituran) + await __do_successful_otp_step(hass, result, mock_ituran) + + await setup_integration(hass, mock_config_entry) + result = await mock_config_entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] is None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "otp" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_OTP: "123456", + }, + ) + + assert result["type"] is 
FlowResultType.ABORT + assert result["reason"] == "reauth_successful" From 6f278fb8560ffbb2d89e62ae0c266e9da3a939a3 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Mon, 16 Dec 2024 14:13:19 +0100 Subject: [PATCH 306/677] Remove custom "unknown" state from Fronius Enum sensor (#133361) --- homeassistant/components/fronius/const.py | 8 +++----- homeassistant/components/fronius/strings.json | 4 +--- .../fronius/snapshots/test_sensor.ambr | 16 ---------------- 3 files changed, 4 insertions(+), 24 deletions(-) diff --git a/homeassistant/components/fronius/const.py b/homeassistant/components/fronius/const.py index 083085270e0..273f1acab41 100644 --- a/homeassistant/components/fronius/const.py +++ b/homeassistant/components/fronius/const.py @@ -42,8 +42,6 @@ class InverterStatusCodeOption(StrEnum): IDLE = "idle" READY = "ready" SLEEPING = "sleeping" - UNKNOWN = "unknown" - INVALID = "invalid" _INVERTER_STATUS_CODES: Final[dict[int, InverterStatusCodeOption]] = { @@ -61,13 +59,13 @@ _INVERTER_STATUS_CODES: Final[dict[int, InverterStatusCodeOption]] = { 11: InverterStatusCodeOption.IDLE, 12: InverterStatusCodeOption.READY, 13: InverterStatusCodeOption.SLEEPING, - 255: InverterStatusCodeOption.UNKNOWN, + # 255: "Unknown" is handled by `None` state - same as the invalid codes. 
} -def get_inverter_status_message(code: StateType) -> InverterStatusCodeOption: +def get_inverter_status_message(code: StateType) -> InverterStatusCodeOption | None: """Return a status message for a given status code.""" - return _INVERTER_STATUS_CODES.get(code, InverterStatusCodeOption.INVALID) # type: ignore[arg-type] + return _INVERTER_STATUS_CODES.get(code) # type: ignore[arg-type] class MeterLocationCodeOption(StrEnum): diff --git a/homeassistant/components/fronius/strings.json b/homeassistant/components/fronius/strings.json index 51cb087efc2..e2740c76696 100644 --- a/homeassistant/components/fronius/strings.json +++ b/homeassistant/components/fronius/strings.json @@ -86,9 +86,7 @@ "error": "Error", "idle": "Idle", "ready": "Ready", - "sleeping": "Sleeping", - "unknown": "Unknown", - "invalid": "Invalid" + "sleeping": "Sleeping" } }, "led_state": { diff --git a/tests/components/fronius/snapshots/test_sensor.ambr b/tests/components/fronius/snapshots/test_sensor.ambr index 700c09da2f6..8f8c9d919fc 100644 --- a/tests/components/fronius/snapshots/test_sensor.ambr +++ b/tests/components/fronius/snapshots/test_sensor.ambr @@ -560,8 +560,6 @@ 'idle', 'ready', 'sleeping', - 'unknown', - 'invalid', ]), }), 'config_entry_id': , @@ -605,8 +603,6 @@ 'idle', 'ready', 'sleeping', - 'unknown', - 'invalid', ]), }), 'context': , @@ -3815,8 +3811,6 @@ 'idle', 'ready', 'sleeping', - 'unknown', - 'invalid', ]), }), 'config_entry_id': , @@ -3860,8 +3854,6 @@ 'idle', 'ready', 'sleeping', - 'unknown', - 'invalid', ]), }), 'context': , @@ -7234,8 +7226,6 @@ 'idle', 'ready', 'sleeping', - 'unknown', - 'invalid', ]), }), 'config_entry_id': , @@ -7279,8 +7269,6 @@ 'idle', 'ready', 'sleeping', - 'unknown', - 'invalid', ]), }), 'context': , @@ -7949,8 +7937,6 @@ 'idle', 'ready', 'sleeping', - 'unknown', - 'invalid', ]), }), 'config_entry_id': , @@ -7994,8 +7980,6 @@ 'idle', 'ready', 'sleeping', - 'unknown', - 'invalid', ]), }), 'context': , From a34992c0b517521b312f18812e431f5acedac664 
Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Mon, 16 Dec 2024 15:13:50 +0100 Subject: [PATCH 307/677] Velbus add PARALLEL_UPDATES to all platforms (#133155) --- homeassistant/components/velbus/binary_sensor.py | 2 ++ homeassistant/components/velbus/button.py | 2 ++ homeassistant/components/velbus/climate.py | 2 ++ homeassistant/components/velbus/cover.py | 2 ++ homeassistant/components/velbus/light.py | 2 ++ homeassistant/components/velbus/quality_scale.yaml | 2 +- homeassistant/components/velbus/select.py | 2 ++ homeassistant/components/velbus/sensor.py | 2 ++ homeassistant/components/velbus/switch.py | 2 ++ 9 files changed, 17 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/velbus/binary_sensor.py b/homeassistant/components/velbus/binary_sensor.py index 584f28e394a..88dc994efe8 100644 --- a/homeassistant/components/velbus/binary_sensor.py +++ b/homeassistant/components/velbus/binary_sensor.py @@ -9,6 +9,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import VelbusConfigEntry from .entity import VelbusEntity +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/velbus/button.py b/homeassistant/components/velbus/button.py index 910ae59b69e..fc943159123 100644 --- a/homeassistant/components/velbus/button.py +++ b/homeassistant/components/velbus/button.py @@ -15,6 +15,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import VelbusConfigEntry from .entity import VelbusEntity, api_call +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/velbus/climate.py b/homeassistant/components/velbus/climate.py index e9128ef7de1..b2f3077ecee 100644 --- a/homeassistant/components/velbus/climate.py +++ b/homeassistant/components/velbus/climate.py @@ -20,6 +20,8 @@ from . 
import VelbusConfigEntry from .const import DOMAIN, PRESET_MODES from .entity import VelbusEntity, api_call +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/velbus/cover.py b/homeassistant/components/velbus/cover.py index 9257dd3f36f..2ddea37f2d6 100644 --- a/homeassistant/components/velbus/cover.py +++ b/homeassistant/components/velbus/cover.py @@ -17,6 +17,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import VelbusConfigEntry from .entity import VelbusEntity, api_call +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/velbus/light.py b/homeassistant/components/velbus/light.py index afe3104aa9a..1adf52a8198 100644 --- a/homeassistant/components/velbus/light.py +++ b/homeassistant/components/velbus/light.py @@ -28,6 +28,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import VelbusConfigEntry from .entity import VelbusEntity, api_call +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/velbus/quality_scale.yaml b/homeassistant/components/velbus/quality_scale.yaml index 9a48e84da93..477b6768e71 100644 --- a/homeassistant/components/velbus/quality_scale.yaml +++ b/homeassistant/components/velbus/quality_scale.yaml @@ -36,7 +36,7 @@ rules: entity-unavailable: todo integration-owner: done log-when-unavailable: done - parallel-updates: todo + parallel-updates: done reauthentication-flow: status: exempt comment: | diff --git a/homeassistant/components/velbus/select.py b/homeassistant/components/velbus/select.py index c0a0a5f532d..6c2dfe0a3b1 100644 --- a/homeassistant/components/velbus/select.py +++ b/homeassistant/components/velbus/select.py @@ -10,6 +10,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import VelbusConfigEntry from .entity import VelbusEntity, api_call +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/velbus/sensor.py b/homeassistant/components/velbus/sensor.py index 2c341ea851d..77833da3ee1 100644 --- a/homeassistant/components/velbus/sensor.py +++ b/homeassistant/components/velbus/sensor.py @@ -15,6 +15,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import VelbusConfigEntry from .entity import VelbusEntity +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/velbus/switch.py b/homeassistant/components/velbus/switch.py index dccb0a02ffa..8256e716d4f 100644 --- a/homeassistant/components/velbus/switch.py +++ b/homeassistant/components/velbus/switch.py @@ -11,6 +11,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import VelbusConfigEntry from .entity import VelbusEntity, api_call +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, From 14f4f8aeb59481776525663f75ddf4ec0f3a9cd3 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 16 Dec 2024 15:37:29 +0100 Subject: [PATCH 308/677] Update hassio backup agents on mount added or removed (#133344) * Update hassio backup agents on mount added or removed * Address review comments --- homeassistant/components/hassio/backup.py | 34 +++++++++++++ tests/components/conftest.py | 3 ++ tests/components/hassio/test_backup.py | 62 +++++++++++++++++++++++ 3 files changed, 99 insertions(+) diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py index e544a56a3c8..0353255fe7b 100644 --- a/homeassistant/components/hassio/backup.py +++ b/homeassistant/components/hassio/backup.py @@ -4,6 +4,7 @@ from __future__ import annotations import asyncio from collections.abc import AsyncIterator, Callable, Coroutine, Mapping +import logging from pathlib import Path from 
typing import Any, cast @@ -32,6 +33,8 @@ from .const import DOMAIN, EVENT_SUPERVISOR_EVENT from .handler import get_supervisor_client LOCATION_CLOUD_BACKUP = ".cloud_backup" +MOUNT_JOBS = ("mount_manager_create_mount", "mount_manager_remove_mount") +_LOGGER = logging.getLogger(__name__) async def async_get_backup_agents( @@ -49,6 +52,37 @@ async def async_get_backup_agents( return agents +@callback +def async_register_backup_agents_listener( + hass: HomeAssistant, + *, + listener: Callable[[], None], + **kwargs: Any, +) -> Callable[[], None]: + """Register a listener to be called when agents are added or removed.""" + + @callback + def unsub() -> None: + """Unsubscribe from job events.""" + unsub_signal() + + @callback + def handle_signal(data: Mapping[str, Any]) -> None: + """Handle a job signal.""" + if ( + data.get("event") != "job" + or not (event_data := data.get("data")) + or event_data.get("name") not in MOUNT_JOBS + or event_data.get("done") is not True + ): + return + _LOGGER.debug("Mount added or removed %s, calling listener", data) + listener() + + unsub_signal = async_dispatcher_connect(hass, EVENT_SUPERVISOR_EVENT, handle_signal) + return unsub + + def _backup_details_to_agent_backup( details: supervisor_backups.BackupComplete, ) -> AgentBackup: diff --git a/tests/components/conftest.py b/tests/components/conftest.py index ac30d105299..3828cc5ff37 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -514,11 +514,14 @@ def resolution_suggestions_for_issue_fixture(supervisor_client: AsyncMock) -> As @pytest.fixture(name="supervisor_client") def supervisor_client() -> Generator[AsyncMock]: """Mock the supervisor client.""" + mounts_info_mock = AsyncMock(spec_set=["mounts"]) + mounts_info_mock.mounts = [] supervisor_client = AsyncMock() supervisor_client.addons = AsyncMock() supervisor_client.discovery = AsyncMock() supervisor_client.homeassistant = AsyncMock() supervisor_client.host = AsyncMock() + 
supervisor_client.mounts.info.return_value = mounts_info_mock supervisor_client.os = AsyncMock() supervisor_client.resolution = AsyncMock() supervisor_client.supervisor = AsyncMock() diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index 660753bd815..3e928bc996b 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -231,6 +231,68 @@ async def test_agent_delete_backup( supervisor_client.backups.remove_backup.assert_called_once_with(backup_id) +@pytest.mark.usefixtures("hassio_client") +@pytest.mark.parametrize( + ("event_data", "mount_info_calls"), + [ + ( + { + "event": "job", + "data": {"name": "mount_manager_create_mount", "done": True}, + }, + 1, + ), + ( + { + "event": "job", + "data": {"name": "mount_manager_create_mount", "done": False}, + }, + 0, + ), + ( + { + "event": "job", + "data": {"name": "mount_manager_remove_mount", "done": True}, + }, + 1, + ), + ( + { + "event": "job", + "data": {"name": "mount_manager_remove_mount", "done": False}, + }, + 0, + ), + ({"event": "job", "data": {"name": "other_job", "done": True}}, 0), + ( + { + "event": "other_event", + "data": {"name": "mount_manager_remove_mount", "done": True}, + }, + 0, + ), + ], +) +async def test_agents_notify_on_mount_added_removed( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + event_data: dict[str, Any], + mount_info_calls: int, +) -> None: + """Test the listener is called when mounts are added or removed.""" + client = await hass_ws_client(hass) + assert supervisor_client.mounts.info.call_count == 1 + assert supervisor_client.mounts.info.call_args[0] == () + supervisor_client.mounts.info.reset_mock() + + await client.send_json_auto_id({"type": "supervisor/event", "data": event_data}) + response = await client.receive_json() + assert response["success"] + await hass.async_block_till_done() + assert supervisor_client.mounts.info.call_count == 
mount_info_calls + + @pytest.mark.usefixtures("hassio_client") async def test_reader_writer_create( hass: HomeAssistant, From 5adb7f4542ad116672e16580348fb9b14ea211b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=85ke=20Strandberg?= Date: Mon, 16 Dec 2024 15:42:15 +0100 Subject: [PATCH 309/677] Translate exception messages in myUplink (#131626) * Translate exceptions * Add one more translation * Adding more translations * Make message easier to understand for end-user * Clarify message * Address review comments --- homeassistant/components/myuplink/__init__.py | 20 +++++++++++++++---- homeassistant/components/myuplink/number.py | 10 ++++++++-- .../components/myuplink/quality_scale.yaml | 4 +--- homeassistant/components/myuplink/select.py | 9 ++++++++- .../components/myuplink/strings.json | 20 +++++++++++++++++++ homeassistant/components/myuplink/switch.py | 8 ++++++-- 6 files changed, 59 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/myuplink/__init__.py b/homeassistant/components/myuplink/__init__.py index e833c5fcd8e..5ad114e973e 100644 --- a/homeassistant/components/myuplink/__init__.py +++ b/homeassistant/components/myuplink/__init__.py @@ -55,13 +55,25 @@ async def async_setup_entry( await auth.async_get_access_token() except ClientResponseError as err: if err.status in {HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN}: - raise ConfigEntryAuthFailed from err - raise ConfigEntryNotReady from err + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="config_entry_auth_failed", + ) from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="config_entry_not_ready", + ) from err except ClientError as err: - raise ConfigEntryNotReady from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="config_entry_not_ready", + ) from err if set(config_entry.data["token"]["scope"].split(" ")) != set(OAUTH2_SCOPES): - raise ConfigEntryAuthFailed("Incorrect OAuth2 scope") + raise 
ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="incorrect_oauth2_scope", + ) # Setup MyUplinkAPI and coordinator for data fetch api = MyUplinkAPI(auth) diff --git a/homeassistant/components/myuplink/number.py b/homeassistant/components/myuplink/number.py index 3d336953396..e1cbd393947 100644 --- a/homeassistant/components/myuplink/number.py +++ b/homeassistant/components/myuplink/number.py @@ -10,7 +10,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import MyUplinkConfigEntry, MyUplinkDataCoordinator -from .const import F_SERIES +from .const import DOMAIN, F_SERIES from .entity import MyUplinkEntity from .helpers import find_matching_platform, skip_entity, transform_model_series @@ -137,7 +137,13 @@ class MyUplinkNumber(MyUplinkEntity, NumberEntity): ) except ClientError as err: raise HomeAssistantError( - f"Failed to set new value {value} for {self.point_id}/{self.entity_id}" + translation_domain=DOMAIN, + translation_key="set_number_error", + translation_placeholders={ + "entity": self.entity_id, + "point": self.point_id, + "value": str(value), + }, ) from err await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/myuplink/quality_scale.yaml b/homeassistant/components/myuplink/quality_scale.yaml index dbe771f7eb2..be0780a206c 100644 --- a/homeassistant/components/myuplink/quality_scale.yaml +++ b/homeassistant/components/myuplink/quality_scale.yaml @@ -78,9 +78,7 @@ rules: It is not feasible to use the API names as translation keys as they can change between firmware and API upgrades and the number of appliance models and firmware releases are huge. Entity names translations are therefore not implemented for the time being. 
- exception-translations: - status: todo - comment: PR pending review \#191937 + exception-translations: done icon-translations: done reconfiguration-flow: done repair-issues: diff --git a/homeassistant/components/myuplink/select.py b/homeassistant/components/myuplink/select.py index 96058b916b3..0074d1c75ff 100644 --- a/homeassistant/components/myuplink/select.py +++ b/homeassistant/components/myuplink/select.py @@ -12,6 +12,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import MyUplinkConfigEntry, MyUplinkDataCoordinator +from .const import DOMAIN from .entity import MyUplinkEntity from .helpers import find_matching_platform, skip_entity @@ -86,7 +87,13 @@ class MyUplinkSelect(MyUplinkEntity, SelectEntity): ) except ClientError as err: raise HomeAssistantError( - f"Failed to set new option {self.options_rev[option]} for {self.point_id}/{self.entity_id}" + translation_domain=DOMAIN, + translation_key="set_select_error", + translation_placeholders={ + "entity": self.entity_id, + "option": self.options_rev[option], + "point": self.point_id, + }, ) from err await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/myuplink/strings.json b/homeassistant/components/myuplink/strings.json index d3d2f198448..939aa2f17c8 100644 --- a/homeassistant/components/myuplink/strings.json +++ b/homeassistant/components/myuplink/strings.json @@ -42,5 +42,25 @@ "name": "Status" } } + }, + "exceptions": { + "config_entry_auth_failed": { + "message": "Error while logging in to the API. Please check your credentials." + }, + "config_entry_not_ready": { + "message": "Error while loading the integration." + }, + "incorrect_oauth2_scope": { + "message": "Stored permissions are invalid. Please login again to update permissions." + }, + "set_number_error": { + "message": "Failed to set new value {value} for {point}/{entity}." 
+ }, + "set_select_error": { + "message": "Failed to set new option {option} for {point}/{entity}." + }, + "set_switch_error": { + "message": "Failed to set state for {entity}." + } } } diff --git a/homeassistant/components/myuplink/switch.py b/homeassistant/components/myuplink/switch.py index 75ba6bd7819..3addc7ce6a9 100644 --- a/homeassistant/components/myuplink/switch.py +++ b/homeassistant/components/myuplink/switch.py @@ -12,7 +12,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import MyUplinkConfigEntry, MyUplinkDataCoordinator -from .const import F_SERIES +from .const import DOMAIN, F_SERIES from .entity import MyUplinkEntity from .helpers import find_matching_platform, skip_entity, transform_model_series @@ -129,7 +129,11 @@ class MyUplinkDevicePointSwitch(MyUplinkEntity, SwitchEntity): ) except aiohttp.ClientError as err: raise HomeAssistantError( - f"Failed to set state for {self.entity_id}" + translation_domain=DOMAIN, + translation_key="set_switch_error", + translation_placeholders={ + "entity": self.entity_id, + }, ) from err await self.coordinator.async_request_refresh() From cefb4a4ccc37431f144781cabba23ad31d9d30bc Mon Sep 17 00:00:00 2001 From: Andrew Sayre <6730289+andrewsayre@users.noreply.github.com> Date: Mon, 16 Dec 2024 10:08:14 -0600 Subject: [PATCH 310/677] Add HEOS reconfigure flow (#133326) * Add reconfig flow * Add reconfigure tests * Mark reconfigure_flow done * Review feedback * Update tests to always end in terminal state * Correct test name and docstring --- homeassistant/components/heos/config_flow.py | 46 +++++++++--- .../components/heos/quality_scale.yaml | 2 +- homeassistant/components/heos/strings.json | 21 ++++-- tests/components/heos/conftest.py | 5 +- tests/components/heos/test_config_flow.py | 74 ++++++++++++++++++- 5 files changed, 129 insertions(+), 19 deletions(-) diff --git a/homeassistant/components/heos/config_flow.py 
b/homeassistant/components/heos/config_flow.py index e8a4dbf7b63..f861247d1a9 100644 --- a/homeassistant/components/heos/config_flow.py +++ b/homeassistant/components/heos/config_flow.py @@ -15,7 +15,20 @@ from .const import DOMAIN def format_title(host: str) -> str: """Format the title for config entries.""" - return f"Controller ({host})" + return f"HEOS System (via {host})" + + +async def _validate_host(host: str, errors: dict[str, str]) -> bool: + """Validate host is reachable, return True, otherwise populate errors and return False.""" + heos = Heos(host) + try: + await heos.connect() + except HeosError: + errors[CONF_HOST] = "cannot_connect" + return False + finally: + await heos.disconnect() + return True class HeosFlowHandler(ConfigFlow, domain=DOMAIN): @@ -47,23 +60,17 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN): self.hass.data.setdefault(DOMAIN, {}) await self.async_set_unique_id(DOMAIN) # Try connecting to host if provided - errors = {} + errors: dict[str, str] = {} host = None if user_input is not None: host = user_input[CONF_HOST] # Map host from friendly name if in discovered hosts host = self.hass.data[DOMAIN].get(host, host) - heos = Heos(host) - try: - await heos.connect() - self.hass.data.pop(DOMAIN) + if await _validate_host(host, errors): + self.hass.data.pop(DOMAIN) # Remove discovery data return self.async_create_entry( title=format_title(host), data={CONF_HOST: host} ) - except HeosError: - errors[CONF_HOST] = "cannot_connect" - finally: - await heos.disconnect() # Return form host_type = ( @@ -74,3 +81,22 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN): data_schema=vol.Schema({vol.Required(CONF_HOST, default=host): host_type}), errors=errors, ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Allow reconfiguration of entry.""" + entry = self._get_reconfigure_entry() + host = entry.data[CONF_HOST] # Get current host value + errors: dict[str, str] = {} + if 
 user_input is not None: + host = user_input[CONF_HOST] + if await _validate_host(host, errors): + return self.async_update_reload_and_abort( + entry, data_updates={CONF_HOST: host} + ) + return self.async_show_form( + step_id="reconfigure", + data_schema=vol.Schema({vol.Required(CONF_HOST, default=host): str}), + errors=errors, + ) diff --git a/homeassistant/components/heos/quality_scale.yaml b/homeassistant/components/heos/quality_scale.yaml index 861ca750780..39c25486e52 100644 --- a/homeassistant/components/heos/quality_scale.yaml +++ b/homeassistant/components/heos/quality_scale.yaml @@ -88,7 +88,7 @@ rules: entity-translations: done exception-translations: todo icon-translations: done - reconfiguration-flow: todo + reconfiguration-flow: done repair-issues: todo stale-devices: todo # Platinum diff --git a/homeassistant/components/heos/strings.json b/homeassistant/components/heos/strings.json index 20a8a2e978b..fe4fc63b449 100644 --- a/homeassistant/components/heos/strings.json +++ b/homeassistant/components/heos/strings.json @@ -2,13 +2,23 @@ "config": { "step": { "user": { - "title": "Connect to Heos", - "description": "Please enter the host name or IP address of a Heos device (preferably one connected via wire to the network).", + "title": "Connect to HEOS", + "description": "Please enter the host name or IP address of a HEOS-capable product to access your HEOS System.", "data": { "host": "[%key:common::config_flow::data::host%]" }, "data_description": { - "host": "The hostname or IP address of your HEOS device." + "host": "Host name or IP address of a HEOS-capable product (preferably one connected via wire to the network)." 
+ } + }, + "reconfigure": { + "title": "Reconfigure HEOS", + "description": "Change the host name or IP address of the HEOS-capable product used to access your HEOS System.", + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "[%key:component::heos::config::step::user::data_description::host%]" } } }, @@ -17,13 +27,14 @@ }, "abort": { "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" } }, "services": { "sign_in": { "name": "Sign in", - "description": "Signs the controller in to a HEOS account.", + "description": "Signs in to a HEOS account.", "fields": { "username": { "name": "[%key:common::config_flow::data::username%]", @@ -37,7 +48,7 @@ }, "sign_out": { "name": "Sign out", - "description": "Signs the controller out of the HEOS account." + "description": "Signs out of the HEOS account." 
} } } diff --git a/tests/components/heos/conftest.py b/tests/components/heos/conftest.py index 95a388d87a8..9ea3341304a 100644 --- a/tests/components/heos/conftest.py +++ b/tests/components/heos/conftest.py @@ -27,7 +27,10 @@ from tests.common import MockConfigEntry def config_entry_fixture(): """Create a mock HEOS config entry.""" return MockConfigEntry( - domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, title="Controller (127.0.0.1)" + domain=DOMAIN, + data={CONF_HOST: "127.0.0.1"}, + title="HEOS System (via 127.0.0.1)", + unique_id=DOMAIN, ) diff --git a/tests/components/heos/test_config_flow.py b/tests/components/heos/test_config_flow.py index 464b62df157..38382a81794 100644 --- a/tests/components/heos/test_config_flow.py +++ b/tests/components/heos/test_config_flow.py @@ -54,7 +54,7 @@ async def test_create_entry_when_host_valid(hass: HomeAssistant, controller) -> ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].unique_id == DOMAIN - assert result["title"] == "Controller (127.0.0.1)" + assert result["title"] == "HEOS System (via 127.0.0.1)" assert result["data"] == data assert controller.connect.call_count == 2 # Also called in async_setup_entry assert controller.disconnect.call_count == 1 @@ -73,7 +73,7 @@ async def test_create_entry_when_friendly_name_valid( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].unique_id == DOMAIN - assert result["title"] == "Controller (127.0.0.1)" + assert result["title"] == "HEOS System (via 127.0.0.1)" assert result["data"] == {CONF_HOST: "127.0.0.1"} assert controller.connect.call_count == 2 # Also called in async_setup_entry assert controller.disconnect.call_count == 1 @@ -120,3 +120,73 @@ async def test_discovery_flow_aborts_already_setup( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" + + +async def test_reconfigure_validates_and_updates_config( + hass: HomeAssistant, config_entry, controller +) -> None: + """Test 
reconfigure validates host and successfully updates.""" + config_entry.add_to_hass(hass) + result = await config_entry.start_reconfigure_flow(hass) + assert config_entry.data[CONF_HOST] == "127.0.0.1" + + # Test reconfigure initially shows form with current host value. + host = next( + key.default() for key in result["data_schema"].schema if key == CONF_HOST + ) + assert host == "127.0.0.1" + assert result["errors"] == {} + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + + # Test reconfigure successfully updates. + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.2"}, + ) + assert controller.connect.call_count == 2 # Also called when entry reloaded + assert controller.disconnect.call_count == 1 + assert config_entry.data == {CONF_HOST: "127.0.0.2"} + assert config_entry.unique_id == DOMAIN + assert result["reason"] == "reconfigure_successful" + assert result["type"] is FlowResultType.ABORT + + +async def test_reconfigure_cannot_connect_recovers( + hass: HomeAssistant, config_entry, controller +) -> None: + """Test reconfigure cannot connect and recovers.""" + controller.connect.side_effect = HeosError() + config_entry.add_to_hass(hass) + result = await config_entry.start_reconfigure_flow(hass) + assert config_entry.data[CONF_HOST] == "127.0.0.1" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.2"}, + ) + + assert controller.connect.call_count == 1 + assert controller.disconnect.call_count == 1 + host = next( + key.default() for key in result["data_schema"].schema if key == CONF_HOST + ) + assert host == "127.0.0.2" + assert result["errors"][CONF_HOST] == "cannot_connect" + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + + # Test reconfigure recovers and successfully updates. 
+ controller.connect.side_effect = None + controller.connect.reset_mock() + controller.disconnect.reset_mock() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.2"}, + ) + assert controller.connect.call_count == 2 # Also called when entry reloaded + assert controller.disconnect.call_count == 1 + assert config_entry.data == {CONF_HOST: "127.0.0.2"} + assert config_entry.unique_id == DOMAIN + assert result["reason"] == "reconfigure_successful" + assert result["type"] is FlowResultType.ABORT From 239767ee62a29950d4c3d694d3d237f73a08a5a1 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 16 Dec 2024 17:48:59 +0100 Subject: [PATCH 311/677] Set default min/max color temperature in mqtt lights (#133356) --- homeassistant/components/mqtt/light/schema_basic.py | 6 ++++-- homeassistant/components/mqtt/light/schema_json.py | 6 ++++-- homeassistant/components/mqtt/light/schema_template.py | 6 ++++-- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/mqtt/light/schema_basic.py b/homeassistant/components/mqtt/light/schema_basic.py index 635c552f37e..159a23d14d9 100644 --- a/homeassistant/components/mqtt/light/schema_basic.py +++ b/homeassistant/components/mqtt/light/schema_basic.py @@ -26,6 +26,8 @@ from homeassistant.components.light import ( ATTR_SUPPORTED_COLOR_MODES, ATTR_WHITE, ATTR_XY_COLOR, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ENTITY_ID_FORMAT, ColorMode, LightEntity, @@ -264,12 +266,12 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity): self._attr_min_color_temp_kelvin = ( color_util.color_temperature_mired_to_kelvin(max_mireds) if (max_mireds := config.get(CONF_MAX_MIREDS)) - else super().min_color_temp_kelvin + else DEFAULT_MIN_KELVIN ) self._attr_max_color_temp_kelvin = ( color_util.color_temperature_mired_to_kelvin(min_mireds) if (min_mireds := config.get(CONF_MIN_MIREDS)) - else super().max_color_temp_kelvin + 
else DEFAULT_MAX_KELVIN ) self._attr_effect_list = config.get(CONF_EFFECT_LIST) diff --git a/homeassistant/components/mqtt/light/schema_json.py b/homeassistant/components/mqtt/light/schema_json.py index 5880a684ec0..f6efdd3281d 100644 --- a/homeassistant/components/mqtt/light/schema_json.py +++ b/homeassistant/components/mqtt/light/schema_json.py @@ -22,6 +22,8 @@ from homeassistant.components.light import ( ATTR_TRANSITION, ATTR_WHITE, ATTR_XY_COLOR, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, DOMAIN as LIGHT_DOMAIN, ENTITY_ID_FORMAT, FLASH_LONG, @@ -276,12 +278,12 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity): self._attr_min_color_temp_kelvin = ( color_util.color_temperature_mired_to_kelvin(max_mireds) if (max_mireds := config.get(CONF_MAX_MIREDS)) - else super().min_color_temp_kelvin + else DEFAULT_MIN_KELVIN ) self._attr_max_color_temp_kelvin = ( color_util.color_temperature_mired_to_kelvin(min_mireds) if (min_mireds := config.get(CONF_MIN_MIREDS)) - else super().max_color_temp_kelvin + else DEFAULT_MAX_KELVIN ) self._attr_effect_list = config.get(CONF_EFFECT_LIST) diff --git a/homeassistant/components/mqtt/light/schema_template.py b/homeassistant/components/mqtt/light/schema_template.py index 7427d25533e..722bd864366 100644 --- a/homeassistant/components/mqtt/light/schema_template.py +++ b/homeassistant/components/mqtt/light/schema_template.py @@ -15,6 +15,8 @@ from homeassistant.components.light import ( ATTR_FLASH, ATTR_HS_COLOR, ATTR_TRANSITION, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ENTITY_ID_FORMAT, ColorMode, LightEntity, @@ -129,12 +131,12 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): self._attr_min_color_temp_kelvin = ( color_util.color_temperature_mired_to_kelvin(max_mireds) if (max_mireds := config.get(CONF_MAX_MIREDS)) - else super().min_color_temp_kelvin + else DEFAULT_MIN_KELVIN ) self._attr_max_color_temp_kelvin = ( color_util.color_temperature_mired_to_kelvin(min_mireds) if (min_mireds := 
config.get(CONF_MIN_MIREDS)) - else super().max_color_temp_kelvin + else DEFAULT_MAX_KELVIN ) self._attr_effect_list = config.get(CONF_EFFECT_LIST) From 77fb440ed414e10c5771a9ad66f13756334441e4 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Mon, 16 Dec 2024 18:06:06 +0000 Subject: [PATCH 312/677] Bump `imgw-pib` to version 1.0.7 (#133364) --- homeassistant/components/imgw_pib/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/imgw_pib/manifest.json b/homeassistant/components/imgw_pib/manifest.json index b5c35f3f1eb..ce3bc14d37b 100644 --- a/homeassistant/components/imgw_pib/manifest.json +++ b/homeassistant/components/imgw_pib/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/imgw_pib", "iot_class": "cloud_polling", - "requirements": ["imgw_pib==1.0.6"] + "requirements": ["imgw_pib==1.0.7"] } diff --git a/requirements_all.txt b/requirements_all.txt index 9ffc6a8f16e..5eecf96d096 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1193,7 +1193,7 @@ iglo==1.2.7 ihcsdk==2.8.5 # homeassistant.components.imgw_pib -imgw_pib==1.0.6 +imgw_pib==1.0.7 # homeassistant.components.incomfort incomfort-client==0.6.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 25c4167a0bf..c10645dc293 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1007,7 +1007,7 @@ idasen-ha==2.6.2 ifaddr==0.2.0 # homeassistant.components.imgw_pib -imgw_pib==1.0.6 +imgw_pib==1.0.7 # homeassistant.components.incomfort incomfort-client==0.6.4 From 482ad6fbee4385eb06ea584be71e4190d06f0061 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Joakim=20S=C3=B8rensen?= Date: Mon, 16 Dec 2024 19:12:15 +0100 Subject: [PATCH 313/677] Increase backup upload timeout (#132990) --- homeassistant/components/cloud/backup.py | 5 +++-- tests/components/cloud/test_backup.py | 4 +++- 2 files 
changed, 6 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/cloud/backup.py b/homeassistant/components/cloud/backup.py index 2c7cc9d7bd5..d394daa7dc5 100644 --- a/homeassistant/components/cloud/backup.py +++ b/homeassistant/components/cloud/backup.py @@ -7,7 +7,7 @@ from collections.abc import AsyncIterator, Callable, Coroutine import hashlib from typing import Any, Self -from aiohttp import ClientError, StreamReader +from aiohttp import ClientError, ClientTimeout, StreamReader from hass_nabucasa import Cloud, CloudError from hass_nabucasa.cloud_api import ( async_files_delete_file, @@ -151,9 +151,10 @@ class CloudBackupAgent(BackupAgent): details["url"], data=await open_stream(), headers=details["headers"] | {"content-length": str(backup.size)}, + timeout=ClientTimeout(connect=10.0, total=43200.0), # 43200s == 12h ) upload_status.raise_for_status() - except ClientError as err: + except (TimeoutError, ClientError) as err: raise BackupAgentError("Failed to upload backup") from err async def async_delete_backup( diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py index d5dc8751d82..ac0ef1826de 100644 --- a/tests/components/cloud/test_backup.py +++ b/tests/components/cloud/test_backup.py @@ -372,6 +372,7 @@ async def test_agents_upload( assert f"Uploading backup {backup_id}" in caplog.text +@pytest.mark.parametrize("put_mock_kwargs", [{"status": 500}, {"exc": TimeoutError}]) @pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") async def test_agents_upload_fail_put( hass: HomeAssistant, @@ -379,6 +380,7 @@ async def test_agents_upload_fail_put( caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, mock_get_upload_details: Mock, + put_mock_kwargs: dict[str, Any], ) -> None: """Test agent upload backup fails.""" client = await hass_client() @@ -395,7 +397,7 @@ async def test_agents_upload_fail_put( protected=True, size=0.0, ) - 
aioclient_mock.put(mock_get_upload_details.return_value["url"], status=500) + aioclient_mock.put(mock_get_upload_details.return_value["url"], **put_mock_kwargs) with ( patch( From e6e9788ecda78d45a4ec5e7ff96ca4e3a7ebff06 Mon Sep 17 00:00:00 2001 From: Simon <80467011+sorgfresser@users.noreply.github.com> Date: Mon, 16 Dec 2024 18:18:09 +0000 Subject: [PATCH 314/677] Add quality scale to ElevenLabs (#133276) --- .../components/elevenlabs/__init__.py | 4 +- .../components/elevenlabs/config_flow.py | 12 +-- .../components/elevenlabs/quality_scale.yaml | 92 +++++++++++++++++++ homeassistant/components/elevenlabs/tts.py | 3 + script/hassfest/quality_scale.py | 1 - 5 files changed, 101 insertions(+), 11 deletions(-) create mode 100644 homeassistant/components/elevenlabs/quality_scale.yaml diff --git a/homeassistant/components/elevenlabs/__init__.py b/homeassistant/components/elevenlabs/__init__.py index db7a7f64c97..84b2b61b8ed 100644 --- a/homeassistant/components/elevenlabs/__init__.py +++ b/homeassistant/components/elevenlabs/__init__.py @@ -10,7 +10,7 @@ from elevenlabs.core import ApiError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError from homeassistant.helpers.httpx_client import get_async_client from .const import CONF_MODEL @@ -49,7 +49,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: EleventLabsConfigEntry) try: model = await get_model_by_id(client, model_id) except ApiError as err: - raise ConfigEntryError("Auth failed") from err + raise ConfigEntryAuthFailed("Auth failed") from err if model is None or (not model.languages): raise ConfigEntryError("Model could not be resolved") diff --git a/homeassistant/components/elevenlabs/config_flow.py b/homeassistant/components/elevenlabs/config_flow.py index 
55cdd3ea944..60df79d6eaa 100644 --- a/homeassistant/components/elevenlabs/config_flow.py +++ b/homeassistant/components/elevenlabs/config_flow.py @@ -9,12 +9,7 @@ from elevenlabs import AsyncElevenLabs from elevenlabs.core import ApiError import voluptuous as vol -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.helpers.httpx_client import get_async_client @@ -24,6 +19,7 @@ from homeassistant.helpers.selector import ( SelectSelectorConfig, ) +from . import EleventLabsConfigEntry from .const import ( CONF_CONFIGURE_VOICE, CONF_MODEL, @@ -96,7 +92,7 @@ class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: EleventLabsConfigEntry, ) -> OptionsFlow: """Create the options flow.""" return ElevenLabsOptionsFlow(config_entry) @@ -105,7 +101,7 @@ class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN): class ElevenLabsOptionsFlow(OptionsFlow): """ElevenLabs options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self, config_entry: EleventLabsConfigEntry) -> None: """Initialize options flow.""" self.api_key: str = config_entry.data[CONF_API_KEY] # id -> name diff --git a/homeassistant/components/elevenlabs/quality_scale.yaml b/homeassistant/components/elevenlabs/quality_scale.yaml new file mode 100644 index 00000000000..49f0d7518f5 --- /dev/null +++ b/homeassistant/components/elevenlabs/quality_scale.yaml @@ -0,0 +1,92 @@ +rules: + # Bronze + action-setup: + status: done + comment: > + Only entity services + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: + status: todo + comment: > + We should have every test end in either ABORT or CREATE_ENTRY. 
+ test_invalid_api_key should assert the kind of error that is raised. + config-flow: done + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: > + Entities of this integration does not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: todo + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: todo + # Silver + config-entry-unloading: done + log-when-unavailable: todo + entity-unavailable: + status: exempt + comment: > + There is no state in the TTS platform and we can't check poll if the TTS service is available. + action-exceptions: done + reauthentication-flow: todo + parallel-updates: done + test-coverage: todo + integration-owner: done + docs-installation-parameters: todo + docs-configuration-parameters: todo + + # Gold + entity-translations: todo + entity-device-class: + status: exempt + comment: There is no device class for Text To Speech entities. + devices: done + entity-category: done + entity-disabled-by-default: todo + discovery: + status: exempt + comment: > + This is not possible because there is no physical device. + stale-devices: + status: exempt + comment: > + This is not possible because there is no physical device. + diagnostics: todo + exception-translations: todo + icon-translations: todo + reconfiguration-flow: + status: todo + comment: > + I imagine this could be useful if the default voice is deleted from voice lab. + dynamic-devices: + status: exempt + comment: | + This is not possible because there is no physical device. + discovery-update-info: + status: exempt + comment: > + This is not needed because there are no physical devices. + repair-issues: todo + docs-use-cases: done + docs-supported-devices: + status: exempt + comment: > + This integration does not support any devices. 
+ docs-supported-functions: todo + docs-data-update: todo + docs-known-limitations: todo + docs-troubleshooting: todo + docs-examples: todo + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/elevenlabs/tts.py b/homeassistant/components/elevenlabs/tts.py index 8b016b6af8b..c96a7161b72 100644 --- a/homeassistant/components/elevenlabs/tts.py +++ b/homeassistant/components/elevenlabs/tts.py @@ -16,6 +16,7 @@ from homeassistant.components.tts import ( TtsAudioType, Voice, ) +from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo @@ -38,6 +39,7 @@ from .const import ( ) _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 def to_voice_settings(options: MappingProxyType[str, Any]) -> VoiceSettings: @@ -84,6 +86,7 @@ class ElevenLabsTTSEntity(TextToSpeechEntity): """The ElevenLabs API entity.""" _attr_supported_options = [ATTR_VOICE] + _attr_entity_category = EntityCategory.CONFIG def __init__( self, diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 43b4adc90e9..5ad3467dd79 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -338,7 +338,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "eight_sleep", "electrasmart", "electric_kiwi", - "elevenlabs", "eliqonline", "elkm1", "elmax", From 34ab3e033f186fe3e980587eab30c10fac0a1e88 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 16 Dec 2024 19:23:05 +0100 Subject: [PATCH 315/677] Remove support for live recorder data post migration of entity IDs (#133370) --- homeassistant/components/recorder/migration.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 
ec9d290049f..b28ca4399c8 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -2738,14 +2738,13 @@ class EventIDPostMigration(BaseRunTimeMigration): return DataMigrationStatus(needs_migrate=False, migration_done=True) -class EntityIDPostMigration(BaseMigrationWithQuery, BaseRunTimeMigration): +class EntityIDPostMigration(BaseMigrationWithQuery, BaseOffLineMigration): """Migration to remove old entity_id strings from states. Introduced in HA Core 2023.4 by PR #89557. """ migration_id = "entity_id_post_migration" - task = MigrationTask index_to_drop = (TABLE_STATES, LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX) def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: @@ -2758,16 +2757,16 @@ class EntityIDPostMigration(BaseMigrationWithQuery, BaseRunTimeMigration): return has_used_states_entity_ids() -NON_LIVE_DATA_MIGRATORS = ( +NON_LIVE_DATA_MIGRATORS: tuple[type[BaseOffLineMigration], ...] = ( StatesContextIDMigration, # Introduced in HA Core 2023.4 EventsContextIDMigration, # Introduced in HA Core 2023.4 EventTypeIDMigration, # Introduced in HA Core 2023.4 by PR #89465 EntityIDMigration, # Introduced in HA Core 2023.4 by PR #89557 + EntityIDPostMigration, # Introduced in HA Core 2023.4 by PR #89557 ) -LIVE_DATA_MIGRATORS = ( +LIVE_DATA_MIGRATORS: tuple[type[BaseRunTimeMigration], ...] 
= ( EventIDPostMigration, # Introduced in HA Core 2023.4 by PR #89901 - EntityIDPostMigration, # Introduced in HA Core 2023.4 by PR #89557 ) From 6a54edce1991c60381fc21ad7d6a6bdfb2cef2b3 Mon Sep 17 00:00:00 2001 From: Alexandre CUER Date: Mon, 16 Dec 2024 19:26:47 +0100 Subject: [PATCH 316/677] Gives a friendly name to emoncms entities if unit is not specified (#133358) --- homeassistant/components/emoncms/sensor.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/emoncms/sensor.py b/homeassistant/components/emoncms/sensor.py index 9273c24c7dc..291ecad0bd3 100644 --- a/homeassistant/components/emoncms/sensor.py +++ b/homeassistant/components/emoncms/sensor.py @@ -317,7 +317,7 @@ async def async_setup_entry( EmonCmsSensor( coordinator, unique_id, - elem["unit"], + elem.get("unit"), name, idx, ) @@ -353,6 +353,7 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity): self.entity_description = description else: self._attr_native_unit_of_measurement = unit_of_measurement + self._attr_name = f"{name} {elem[FEED_NAME]}" self._update_attributes(elem) def _update_attributes(self, elem: dict[str, Any]) -> None: From 2da7a93139b868088924b0ba7e4632624d1f0ac1 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Mon, 16 Dec 2024 20:53:17 +0100 Subject: [PATCH 317/677] Add switch platform to local_slide (#133369) --- .../components/slide_local/__init__.py | 2 +- .../components/slide_local/strings.json | 5 ++ .../components/slide_local/switch.py | 56 +++++++++++++++++ .../slide_local/snapshots/test_switch.ambr | 48 +++++++++++++++ tests/components/slide_local/test_switch.py | 61 +++++++++++++++++++ 5 files changed, 171 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/slide_local/switch.py create mode 100644 tests/components/slide_local/snapshots/test_switch.ambr create mode 100644 tests/components/slide_local/test_switch.py diff --git 
a/homeassistant/components/slide_local/__init__.py b/homeassistant/components/slide_local/__init__.py index 6f329477600..5b4867bf337 100644 --- a/homeassistant/components/slide_local/__init__.py +++ b/homeassistant/components/slide_local/__init__.py @@ -8,7 +8,7 @@ from homeassistant.core import HomeAssistant from .coordinator import SlideCoordinator -PLATFORMS = [Platform.BUTTON, Platform.COVER] +PLATFORMS = [Platform.BUTTON, Platform.COVER, Platform.SWITCH] type SlideConfigEntry = ConfigEntry[SlideCoordinator] diff --git a/homeassistant/components/slide_local/strings.json b/homeassistant/components/slide_local/strings.json index c593dea8ed7..24c03d2ff96 100644 --- a/homeassistant/components/slide_local/strings.json +++ b/homeassistant/components/slide_local/strings.json @@ -46,6 +46,11 @@ "calibrate": { "name": "Calibrate" } + }, + "switch": { + "touchgo": { + "name": "TouchGo" + } } }, "exceptions": { diff --git a/homeassistant/components/slide_local/switch.py b/homeassistant/components/slide_local/switch.py new file mode 100644 index 00000000000..6d357864c48 --- /dev/null +++ b/homeassistant/components/slide_local/switch.py @@ -0,0 +1,56 @@ +"""Support for Slide switch.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import SlideConfigEntry +from .coordinator import SlideCoordinator +from .entity import SlideEntity + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SlideConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up switch for Slide platform.""" + + coordinator = entry.runtime_data + + async_add_entities([SlideSwitch(coordinator)]) + + +class SlideSwitch(SlideEntity, SwitchEntity): + """Defines a Slide switch.""" + + _attr_entity_category = EntityCategory.CONFIG + _attr_translation_key = "touchgo" + _attr_device_class = SwitchDeviceClass.SWITCH + + def __init__(self, coordinator: SlideCoordinator) -> None: + """Initialize the slide switch.""" + super().__init__(coordinator) + self._attr_unique_id = f"{coordinator.data["mac"]}-touchgo" + + @property + def is_on(self) -> bool: + """Return if switch is on.""" + return self.coordinator.data["touch_go"] + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off touchgo.""" + await self.coordinator.slide.slide_set_touchgo(self.coordinator.host, False) + await self.coordinator.async_request_refresh() + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on touchgo.""" + await self.coordinator.slide.slide_set_touchgo(self.coordinator.host, True) + await self.coordinator.async_request_refresh() diff --git a/tests/components/slide_local/snapshots/test_switch.ambr b/tests/components/slide_local/snapshots/test_switch.ambr new file mode 100644 index 00000000000..e19467c283e --- /dev/null +++ b/tests/components/slide_local/snapshots/test_switch.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_all_entities[switch.slide_bedroom_touchgo-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.slide_bedroom_touchgo', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'TouchGo', + 'platform': 'slide_local', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'touchgo', + 'unique_id': '1234567890ab-touchgo', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[switch.slide_bedroom_touchgo-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'slide bedroom TouchGo', + }), + 'context': , + 'entity_id': 'switch.slide_bedroom_touchgo', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/slide_local/test_switch.py b/tests/components/slide_local/test_switch.py new file mode 100644 index 00000000000..0ac9820ca10 --- /dev/null +++ b/tests/components/slide_local/test_switch.py @@ -0,0 +1,61 @@ +"""Tests for the Slide Local switch platform.""" + +from unittest.mock import AsyncMock + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TOGGLE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_platform + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_platform(hass, mock_config_entry, [Platform.SWITCH]) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize( + ("service"), + [ + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + SERVICE_TOGGLE, + ], +) +async def test_services( + hass: HomeAssistant, + service: str, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test switch.""" + await setup_platform(hass, mock_config_entry, [Platform.SWITCH]) + + await hass.services.async_call( + SWITCH_DOMAIN, + service, + { + ATTR_ENTITY_ID: "switch.slide_bedroom_touchgo", + }, + blocking=True, + ) + mock_slide_api.slide_set_touchgo.assert_called_once() From 40182fc197e22acc42976a5008c5b0de139d55ac Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 16 Dec 2024 21:35:55 +0100 Subject: [PATCH 318/677] Load sun via entity component (#132598) * Load sun via entity component * Remove unique id * Remove entity registry --- homeassistant/components/sun/__init__.py | 13 ++++++++++--- homeassistant/components/sun/entity.py | 13 ++++--------- 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/sun/__init__.py b/homeassistant/components/sun/__init__.py index 8f6f3098ee8..f42f5450462 100644 --- a/homeassistant/components/sun/__init__.py +++ b/homeassistant/components/sun/__init__.py @@ -2,10 +2,13 @@ from __future__ import annotations +import logging + from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv +from 
homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.typing import ConfigType # The sensor platform is pre-imported here to ensure @@ -23,6 +26,8 @@ from .entity import Sun, SunConfigEntry CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) +_LOGGER = logging.getLogger(__name__) + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Track the state of the sun.""" @@ -42,7 +47,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: SunConfigEntry) -> bool: """Set up from a config entry.""" - entry.runtime_data = sun = Sun(hass) + sun = Sun(hass) + component = EntityComponent[Sun](_LOGGER, DOMAIN, hass) + await component.async_add_entities([sun]) + entry.runtime_data = sun entry.async_on_unload(sun.remove_listeners) await hass.config_entries.async_forward_entry_setups(entry, [Platform.SENSOR]) return True @@ -53,6 +61,5 @@ async def async_unload_entry(hass: HomeAssistant, entry: SunConfigEntry) -> bool if unload_ok := await hass.config_entries.async_unload_platforms( entry, [Platform.SENSOR] ): - sun = entry.runtime_data - hass.states.async_remove(sun.entity_id) + await entry.runtime_data.async_remove() return unload_ok diff --git a/homeassistant/components/sun/entity.py b/homeassistant/components/sun/entity.py index 10d328afde7..925845c8b4d 100644 --- a/homeassistant/components/sun/entity.py +++ b/homeassistant/components/sun/entity.py @@ -100,9 +100,6 @@ class Sun(Entity): _attr_name = "Sun" entity_id = ENTITY_ID - # This entity is legacy and does not have a platform. - # We can't fix this easily without breaking changes. 
- _no_platform_reported = True location: Location elevation: Elevation @@ -122,18 +119,16 @@ class Sun(Entity): self.hass = hass self.phase: str | None = None - # This is normally done by async_internal_added_to_hass which is not called - # for sun because sun has no platform - self._state_info = { - "unrecorded_attributes": self._Entity__combined_unrecorded_attributes # type: ignore[attr-defined] - } - self._config_listener: CALLBACK_TYPE | None = None self._update_events_listener: CALLBACK_TYPE | None = None self._update_sun_position_listener: CALLBACK_TYPE | None = None self._config_listener = self.hass.bus.async_listen( EVENT_CORE_CONFIG_UPDATE, self.update_location ) + + async def async_added_to_hass(self) -> None: + """Update after entity has been added.""" + await super().async_added_to_hass() self.update_location(initial=True) @callback From 3a622218f45b8888f9aa9e1311000605c385793b Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 16 Dec 2024 21:47:31 +0100 Subject: [PATCH 319/677] Improvements to the LaMetric config flow tests (#133383) --- tests/components/lametric/test_config_flow.py | 330 +++++++++--------- tests/components/lametric/test_init.py | 2 +- 2 files changed, 166 insertions(+), 166 deletions(-) diff --git a/tests/components/lametric/test_config_flow.py b/tests/components/lametric/test_config_flow.py index 3fbe606c7f1..4a546122e30 100644 --- a/tests/components/lametric/test_config_flow.py +++ b/tests/components/lametric/test_config_flow.py @@ -55,25 +55,24 @@ async def test_full_cloud_import_flow_multiple_devices( DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.MENU - assert result.get("step_id") == "choice_enter_manual_or_fetch_cloud" - assert result.get("menu_options") == ["pick_implementation", "manual_entry"] - flow_id = result["flow_id"] + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "choice_enter_manual_or_fetch_cloud" + assert result["menu_options"] == 
["pick_implementation", "manual_entry"] - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) - assert result2.get("type") is FlowResultType.EXTERNAL_STEP - assert result2.get("url") == ( + assert result["type"] is FlowResultType.EXTERNAL_STEP + assert result["url"] == ( "https://developer.lametric.com/api/v2/oauth2/authorize" "?response_type=code&client_id=client" "&redirect_uri=https://example.com/auth/external/callback" @@ -96,24 +95,26 @@ async def test_full_cloud_import_flow_multiple_devices( }, ) - result3 = await hass.config_entries.flow.async_configure(flow_id) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result3.get("type") is FlowResultType.FORM - assert result3.get("step_id") == "cloud_select_device" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "cloud_select_device" - result4 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_DEVICE: "SA110405124500W00BS9"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_DEVICE: "SA110405124500W00BS9"} ) - assert result4.get("type") is FlowResultType.CREATE_ENTRY - assert result4.get("title") == "Frenck's LaMetric" - assert result4.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result4 - assert 
result4["result"].unique_id == "SA110405124500W00BS9" + assert not config_entry.options assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 @@ -135,25 +136,24 @@ async def test_full_cloud_import_flow_single_device( DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.MENU - assert result.get("step_id") == "choice_enter_manual_or_fetch_cloud" - assert result.get("menu_options") == ["pick_implementation", "manual_entry"] - flow_id = result["flow_id"] + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "choice_enter_manual_or_fetch_cloud" + assert result["menu_options"] == ["pick_implementation", "manual_entry"] - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) - assert result2.get("type") is FlowResultType.EXTERNAL_STEP - assert result2.get("url") == ( + assert result["type"] is FlowResultType.EXTERNAL_STEP + assert result["url"] == ( "https://developer.lametric.com/api/v2/oauth2/authorize" "?response_type=code&client_id=client" "&redirect_uri=https://example.com/auth/external/callback" @@ -181,17 +181,19 @@ async def test_full_cloud_import_flow_single_device( mock_lametric_cloud.devices.return_value = [ mock_lametric_cloud.devices.return_value[0] ] - result3 = await hass.config_entries.flow.async_configure(flow_id) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3.get("title") == "Frenck's LaMetric" - assert result3.get("data") == { + assert result["type"] is 
FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result3 - assert result3["result"].unique_id == "SA110405124500W00BS9" + assert not config_entry.options assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 @@ -209,31 +211,34 @@ async def test_full_manual( DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.MENU - assert result.get("step_id") == "choice_enter_manual_or_fetch_cloud" - assert result.get("menu_options") == ["pick_implementation", "manual_entry"] - flow_id = result["flow_id"] + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "choice_enter_manual_or_fetch_cloud" + assert result["menu_options"] == ["pick_implementation", "manual_entry"] - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "manual_entry"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "manual_entry"} ) - assert result2.get("type") is FlowResultType.FORM - assert result2.get("step_id") == "manual_entry" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "manual_entry" - result3 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"}, ) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3.get("title") == "Frenck's LaMetric" - assert result3.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + + 
assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result3 - assert result3["result"].unique_id == "SA110405124500W00BS9" + assert not config_entry.options assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 1 @@ -258,25 +263,24 @@ async def test_full_ssdp_with_cloud_import( DOMAIN, context={"source": SOURCE_SSDP}, data=SSDP_DISCOVERY_INFO ) - assert result.get("type") is FlowResultType.MENU - assert result.get("step_id") == "choice_enter_manual_or_fetch_cloud" - assert result.get("menu_options") == ["pick_implementation", "manual_entry"] - flow_id = result["flow_id"] + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "choice_enter_manual_or_fetch_cloud" + assert result["menu_options"] == ["pick_implementation", "manual_entry"] - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) - assert result2.get("type") is FlowResultType.EXTERNAL_STEP - assert result2.get("url") == ( + assert result["type"] is FlowResultType.EXTERNAL_STEP + assert result["url"] == ( "https://developer.lametric.com/api/v2/oauth2/authorize" "?response_type=code&client_id=client" "&redirect_uri=https://example.com/auth/external/callback" @@ -299,17 +303,18 @@ async def test_full_ssdp_with_cloud_import( }, ) - result3 = await hass.config_entries.flow.async_configure(flow_id) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert 
result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3.get("title") == "Frenck's LaMetric" - assert result3.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result3 - assert result3["result"].unique_id == "SA110405124500W00BS9" assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 @@ -327,31 +332,32 @@ async def test_full_ssdp_manual_entry( DOMAIN, context={"source": SOURCE_SSDP}, data=SSDP_DISCOVERY_INFO ) - assert result.get("type") is FlowResultType.MENU - assert result.get("step_id") == "choice_enter_manual_or_fetch_cloud" - assert result.get("menu_options") == ["pick_implementation", "manual_entry"] - flow_id = result["flow_id"] + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "choice_enter_manual_or_fetch_cloud" + assert result["menu_options"] == ["pick_implementation", "manual_entry"] - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "manual_entry"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "manual_entry"} ) - assert result2.get("type") is FlowResultType.FORM - assert result2.get("step_id") == "manual_entry" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "manual_entry" - result3 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_API_KEY: "mock-api-key"} ) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3.get("title") == "Frenck's LaMetric" - assert 
result3.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result3 - assert result3["result"].unique_id == "SA110405124500W00BS9" + assert not config_entry.options assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 1 @@ -385,8 +391,8 @@ async def test_ssdp_abort_invalid_discovery( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_SSDP}, data=data ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == reason + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == reason @pytest.mark.usefixtures("current_request_with_host") @@ -404,16 +410,15 @@ async def test_cloud_import_updates_existing_entry( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - flow_id = result["flow_id"] await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) @@ -428,14 +433,14 @@ async def test_cloud_import_updates_existing_entry( "expires_in": 60, }, ) - await hass.config_entries.flow.async_configure(flow_id) + await hass.config_entries.flow.async_configure(result["flow_id"]) - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_DEVICE: "SA110405124500W00BS9"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_DEVICE: "SA110405124500W00BS9"} ) 
- assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", @@ -458,18 +463,18 @@ async def test_manual_updates_existing_entry( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - flow_id = result["flow_id"] await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "manual_entry"} + result["flow_id"], user_input={"next_step_id": "manual_entry"} ) - result3 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"}, ) - assert result3.get("type") is FlowResultType.ABORT - assert result3.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", @@ -490,8 +495,8 @@ async def test_discovery_updates_existing_entry( DOMAIN, context={"source": SOURCE_SSDP}, data=SSDP_DISCOVERY_INFO ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-from-fixture", @@ -510,16 +515,15 @@ async def test_cloud_abort_no_devices( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - flow_id = result["flow_id"] await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result["flow_id"], 
user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) @@ -537,10 +541,10 @@ async def test_cloud_abort_no_devices( # Stage there are no devices mock_lametric_cloud.devices.return_value = [] - result2 = await hass.config_entries.flow.async_configure(flow_id) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "no_devices" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_devices" assert len(mock_lametric_cloud.devices.mock_calls) == 1 @@ -565,39 +569,42 @@ async def test_manual_errors( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - flow_id = result["flow_id"] await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "manual_entry"} + result["flow_id"], user_input={"next_step_id": "manual_entry"} ) mock_lametric.device.side_effect = side_effect - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"}, ) - assert result2.get("type") is FlowResultType.FORM - assert result2.get("step_id") == "manual_entry" - assert result2.get("errors") == {"base": reason} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "manual_entry" + assert result["errors"] == {"base": reason} assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 0 assert len(mock_setup_entry.mock_calls) == 0 mock_lametric.device.side_effect = None - result3 = await hass.config_entries.flow.async_configure( - flow_id, 
user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key"}, ) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3.get("title") == "Frenck's LaMetric" - assert result3.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result3 - assert result3["result"].unique_id == "SA110405124500W00BS9" + assert not config_entry.options assert len(mock_lametric.device.mock_calls) == 2 assert len(mock_lametric.notify.mock_calls) == 1 @@ -628,16 +635,15 @@ async def test_cloud_errors( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - flow_id = result["flow_id"] await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) @@ -652,16 +658,16 @@ async def test_cloud_errors( "expires_in": 60, }, ) - await hass.config_entries.flow.async_configure(flow_id) + await hass.config_entries.flow.async_configure(result["flow_id"]) mock_lametric.device.side_effect = side_effect - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_DEVICE: "SA110405124500W00BS9"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_DEVICE: "SA110405124500W00BS9"} ) - assert result2.get("type") is FlowResultType.FORM - assert 
result2.get("step_id") == "cloud_select_device" - assert result2.get("errors") == {"base": reason} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "cloud_select_device" + assert result["errors"] == {"base": reason} assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 @@ -669,19 +675,21 @@ async def test_cloud_errors( assert len(mock_setup_entry.mock_calls) == 0 mock_lametric.device.side_effect = None - result3 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_DEVICE: "SA110405124500W00BS9"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_DEVICE: "SA110405124500W00BS9"} ) - assert result3.get("type") is FlowResultType.CREATE_ENTRY - assert result3.get("title") == "Frenck's LaMetric" - assert result3.get("data") == { + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.title == "Frenck's LaMetric" + assert config_entry.unique_id == "SA110405124500W00BS9" + assert config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", CONF_MAC: "AA:BB:CC:DD:EE:FF", } - assert "result" in result3 - assert result3["result"].unique_id == "SA110405124500W00BS9" + assert not config_entry.options assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 2 @@ -706,8 +714,8 @@ async def test_dhcp_discovery_updates_entry( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" assert mock_config_entry.data == { CONF_API_KEY: "mock-from-fixture", CONF_HOST: "127.0.0.42", @@ -732,8 +740,8 @@ async def test_dhcp_unknown_device( ), ) - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "unknown" + assert result["type"] is 
FlowResultType.ABORT + assert result["reason"] == "unknown" @pytest.mark.usefixtures("current_request_with_host", "mock_setup_entry") @@ -750,16 +758,14 @@ async def test_reauth_cloud_import( result = await mock_config_entry.start_reauth_flow(hass) - flow_id = result["flow_id"] - await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) @@ -776,10 +782,10 @@ async def test_reauth_cloud_import( }, ) - result2 = await hass.config_entries.flow.async_configure(flow_id) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" assert mock_config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", @@ -806,16 +812,14 @@ async def test_reauth_cloud_abort_device_not_found( result = await mock_config_entry.start_reauth_flow(hass) - flow_id = result["flow_id"] - await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "pick_implementation"} + result["flow_id"], user_input={"next_step_id": "pick_implementation"} ) state = config_entry_oauth2_flow._encode_jwt( hass, { - "flow_id": flow_id, + "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) @@ -832,10 +836,10 @@ async def test_reauth_cloud_abort_device_not_found( }, ) - result2 = await hass.config_entries.flow.async_configure(flow_id) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "reauth_device_not_found" 
+ assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_device_not_found" assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 0 @@ -853,18 +857,16 @@ async def test_reauth_manual( result = await mock_config_entry.start_reauth_flow(hass) - flow_id = result["flow_id"] - await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "manual_entry"} + result["flow_id"], user_input={"next_step_id": "manual_entry"} ) - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_API_KEY: "mock-api-key"} ) - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" assert mock_config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", @@ -887,18 +889,16 @@ async def test_reauth_manual_sky( result = await mock_config_entry.start_reauth_flow(hass) - flow_id = result["flow_id"] - await hass.config_entries.flow.async_configure( - flow_id, user_input={"next_step_id": "manual_entry"} + result["flow_id"], user_input={"next_step_id": "manual_entry"} ) - result2 = await hass.config_entries.flow.async_configure( - flow_id, user_input={CONF_API_KEY: "mock-api-key"} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_API_KEY: "mock-api-key"} ) - assert result2.get("type") is FlowResultType.ABORT - assert result2.get("reason") == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" assert mock_config_entry.data == { CONF_HOST: "127.0.0.1", CONF_API_KEY: "mock-api-key", diff --git a/tests/components/lametric/test_init.py b/tests/components/lametric/test_init.py index 
7352721e992..2fd8219ea51 100644 --- a/tests/components/lametric/test_init.py +++ b/tests/components/lametric/test_init.py @@ -74,7 +74,7 @@ async def test_config_entry_authentication_failed( assert len(flows) == 1 flow = flows[0] - assert flow.get("step_id") == "choice_enter_manual_or_fetch_cloud" + assert flow["step_id"] == "choice_enter_manual_or_fetch_cloud" assert flow.get("handler") == DOMAIN assert "context" in flow From 308200781f16b7f4a75f45c8b7705361852e76d0 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Mon, 16 Dec 2024 14:49:15 -0600 Subject: [PATCH 320/677] Add required domain to vacuum intents (#133166) --- homeassistant/components/vacuum/intent.py | 2 ++ tests/components/vacuum/test_intent.py | 42 +++++++++++++++++++++++ 2 files changed, 44 insertions(+) diff --git a/homeassistant/components/vacuum/intent.py b/homeassistant/components/vacuum/intent.py index 8952c13875d..48340252b6e 100644 --- a/homeassistant/components/vacuum/intent.py +++ b/homeassistant/components/vacuum/intent.py @@ -18,6 +18,7 @@ async def async_setup_intents(hass: HomeAssistant) -> None: DOMAIN, SERVICE_START, description="Starts a vacuum", + required_domains={DOMAIN}, platforms={DOMAIN}, ), ) @@ -28,6 +29,7 @@ async def async_setup_intents(hass: HomeAssistant) -> None: DOMAIN, SERVICE_RETURN_TO_BASE, description="Returns a vacuum to base", + required_domains={DOMAIN}, platforms={DOMAIN}, ), ) diff --git a/tests/components/vacuum/test_intent.py b/tests/components/vacuum/test_intent.py index cf96d32ad49..9ede7dbc04e 100644 --- a/tests/components/vacuum/test_intent.py +++ b/tests/components/vacuum/test_intent.py @@ -37,6 +37,27 @@ async def test_start_vacuum_intent(hass: HomeAssistant) -> None: assert call.data == {"entity_id": entity_id} +async def test_start_vacuum_without_name(hass: HomeAssistant) -> None: + """Test starting a vacuum without specifying the name.""" + await vacuum_intent.async_setup_intents(hass) + + entity_id = f"{DOMAIN}.test_vacuum" + 
hass.states.async_set(entity_id, STATE_IDLE) + calls = async_mock_service(hass, DOMAIN, SERVICE_START) + + response = await intent.async_handle( + hass, "test", vacuum_intent.INTENT_VACUUM_START, {} + ) + await hass.async_block_till_done() + + assert response.response_type == intent.IntentResponseType.ACTION_DONE + assert len(calls) == 1 + call = calls[0] + assert call.domain == DOMAIN + assert call.service == SERVICE_START + assert call.data == {"entity_id": entity_id} + + async def test_stop_vacuum_intent(hass: HomeAssistant) -> None: """Test HassTurnOff intent for vacuums.""" await vacuum_intent.async_setup_intents(hass) @@ -59,3 +80,24 @@ async def test_stop_vacuum_intent(hass: HomeAssistant) -> None: assert call.domain == DOMAIN assert call.service == SERVICE_RETURN_TO_BASE assert call.data == {"entity_id": entity_id} + + +async def test_stop_vacuum_without_name(hass: HomeAssistant) -> None: + """Test stopping a vacuum without specifying the name.""" + await vacuum_intent.async_setup_intents(hass) + + entity_id = f"{DOMAIN}.test_vacuum" + hass.states.async_set(entity_id, STATE_IDLE) + calls = async_mock_service(hass, DOMAIN, SERVICE_RETURN_TO_BASE) + + response = await intent.async_handle( + hass, "test", vacuum_intent.INTENT_VACUUM_RETURN_TO_BASE, {} + ) + await hass.async_block_till_done() + + assert response.response_type == intent.IntentResponseType.ACTION_DONE + assert len(calls) == 1 + call = calls[0] + assert call.domain == DOMAIN + assert call.service == SERVICE_RETURN_TO_BASE + assert call.data == {"entity_id": entity_id} From 8c67819f507d823d1868d958e4d86b7bc37e125b Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Mon, 16 Dec 2024 22:40:00 +0100 Subject: [PATCH 321/677] Update axis to v64 (#133385) --- homeassistant/components/axis/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git 
a/homeassistant/components/axis/manifest.json b/homeassistant/components/axis/manifest.json index 7163437361a..9758af60178 100644 --- a/homeassistant/components/axis/manifest.json +++ b/homeassistant/components/axis/manifest.json @@ -29,7 +29,7 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["axis"], - "requirements": ["axis==63"], + "requirements": ["axis==64"], "ssdp": [ { "manufacturer": "AXIS" diff --git a/requirements_all.txt b/requirements_all.txt index 5eecf96d096..c4e9529c6c8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -539,7 +539,7 @@ av==13.1.0 # avion==0.10 # homeassistant.components.axis -axis==63 +axis==64 # homeassistant.components.fujitsu_fglair ayla-iot-unofficial==1.4.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c10645dc293..056d7422195 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -488,7 +488,7 @@ automower-ble==0.2.0 av==13.1.0 # homeassistant.components.axis -axis==63 +axis==64 # homeassistant.components.fujitsu_fglair ayla-iot-unofficial==1.4.4 From 9cdc36681a30d537020d2c4fca2cac47f718b240 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 16 Dec 2024 23:01:24 +0100 Subject: [PATCH 322/677] Remove setup entry mock assert from LaMetric config flow (#133387) --- tests/components/lametric/conftest.py | 4 +-- tests/components/lametric/test_config_flow.py | 28 +++++-------------- 2 files changed, 9 insertions(+), 23 deletions(-) diff --git a/tests/components/lametric/conftest.py b/tests/components/lametric/conftest.py index c460834be6c..da86d1bc4de 100644 --- a/tests/components/lametric/conftest.py +++ b/tests/components/lametric/conftest.py @@ -49,8 +49,8 @@ def mock_setup_entry() -> Generator[AsyncMock]: """Mock setting up a config entry.""" with patch( "homeassistant.components.lametric.async_setup_entry", return_value=True - ) as mock_setup: - yield mock_setup + ): + yield @pytest.fixture diff --git 
a/tests/components/lametric/test_config_flow.py b/tests/components/lametric/test_config_flow.py index 4a546122e30..ccbbe005639 100644 --- a/tests/components/lametric/test_config_flow.py +++ b/tests/components/lametric/test_config_flow.py @@ -41,12 +41,11 @@ SSDP_DISCOVERY_INFO = SsdpServiceInfo( ) -@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("current_request_with_host", "mock_setup_entry") async def test_full_cloud_import_flow_multiple_devices( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - mock_setup_entry: MagicMock, mock_lametric_cloud: MagicMock, mock_lametric: MagicMock, ) -> None: @@ -119,15 +118,13 @@ async def test_full_cloud_import_flow_multiple_devices( assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("current_request_with_host", "mock_setup_entry") async def test_full_cloud_import_flow_single_device( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - mock_setup_entry: MagicMock, mock_lametric_cloud: MagicMock, mock_lametric: MagicMock, ) -> None: @@ -198,12 +195,11 @@ async def test_full_cloud_import_flow_single_device( assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.usefixtures("mock_setup_entry") async def test_full_manual( hass: HomeAssistant, - mock_setup_entry: MagicMock, mock_lametric: MagicMock, ) -> None: """Check a full flow manual entry.""" @@ -246,15 +242,12 @@ async def test_full_manual( notification: Notification = mock_lametric.notify.mock_calls[0][2]["notification"] assert notification.model.sound == 
Sound(sound=NotificationSound.WIN) - assert len(mock_setup_entry.mock_calls) == 1 - -@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("current_request_with_host", "mock_setup_entry") async def test_full_ssdp_with_cloud_import( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - mock_setup_entry: MagicMock, mock_lametric_cloud: MagicMock, mock_lametric: MagicMock, ) -> None: @@ -319,12 +312,11 @@ async def test_full_ssdp_with_cloud_import( assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.usefixtures("mock_setup_entry") async def test_full_ssdp_manual_entry( hass: HomeAssistant, - mock_setup_entry: MagicMock, mock_lametric: MagicMock, ) -> None: """Check a full flow triggered by SSDP, with manual API key entry.""" @@ -361,7 +353,6 @@ async def test_full_ssdp_manual_entry( assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 @pytest.mark.parametrize( @@ -549,6 +540,7 @@ async def test_cloud_abort_no_devices( assert len(mock_lametric_cloud.devices.mock_calls) == 1 +@pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.parametrize( ("side_effect", "reason"), [ @@ -561,7 +553,6 @@ async def test_cloud_abort_no_devices( async def test_manual_errors( hass: HomeAssistant, mock_lametric: MagicMock, - mock_setup_entry: MagicMock, side_effect: Exception, reason: str, ) -> None: @@ -586,7 +577,6 @@ async def test_manual_errors( assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 0 - assert len(mock_setup_entry.mock_calls) == 0 mock_lametric.device.side_effect = None result = await hass.config_entries.flow.async_configure( @@ -608,10 +598,9 @@ async def test_manual_errors( assert 
len(mock_lametric.device.mock_calls) == 2 assert len(mock_lametric.notify.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("current_request_with_host", "mock_setup_entry") @pytest.mark.parametrize( ("side_effect", "reason"), [ @@ -625,7 +614,6 @@ async def test_cloud_errors( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - mock_setup_entry: MagicMock, mock_lametric_cloud: MagicMock, mock_lametric: MagicMock, side_effect: Exception, @@ -672,7 +660,6 @@ async def test_cloud_errors( assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 1 assert len(mock_lametric.notify.mock_calls) == 0 - assert len(mock_setup_entry.mock_calls) == 0 mock_lametric.device.side_effect = None result = await hass.config_entries.flow.async_configure( @@ -694,7 +681,6 @@ async def test_cloud_errors( assert len(mock_lametric_cloud.devices.mock_calls) == 1 assert len(mock_lametric.device.mock_calls) == 2 assert len(mock_lametric.notify.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 async def test_dhcp_discovery_updates_entry( From a374c7e4ca6bdf243a7b697fa68972b2582afea6 Mon Sep 17 00:00:00 2001 From: Dan Raper Date: Mon, 16 Dec 2024 22:54:33 +0000 Subject: [PATCH 323/677] Add reauth flow to Ohme (#133275) * Add reauth flow to ohme * Reuse config flow user step for reauth * Tidying up * Add common _validate_account method for reauth and user config flow steps * Add reauth fail test --- homeassistant/components/ohme/__init__.py | 4 +- homeassistant/components/ohme/config_flow.py | 68 +++++++++++++++-- homeassistant/components/ohme/manifest.json | 2 +- .../components/ohme/quality_scale.yaml | 2 +- homeassistant/components/ohme/strings.json | 13 +++- tests/components/ohme/test_config_flow.py | 74 +++++++++++++++++++ 6 files changed, 150 insertions(+), 13 deletions(-) diff --git 
a/homeassistant/components/ohme/__init__.py b/homeassistant/components/ohme/__init__.py index 8ca983cd72a..4dc75cb574c 100644 --- a/homeassistant/components/ohme/__init__.py +++ b/homeassistant/components/ohme/__init__.py @@ -7,7 +7,7 @@ from ohme import ApiException, AuthException, OhmeApiClient from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from .const import DOMAIN, PLATFORMS from .coordinator import OhmeAdvancedSettingsCoordinator, OhmeChargeSessionCoordinator @@ -36,7 +36,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: OhmeConfigEntry) -> bool translation_key="device_info_failed", translation_domain=DOMAIN ) except AuthException as e: - raise ConfigEntryError( + raise ConfigEntryAuthFailed( translation_key="auth_failed", translation_domain=DOMAIN ) from e except ApiException as e: diff --git a/homeassistant/components/ohme/config_flow.py b/homeassistant/components/ohme/config_flow.py index ea110f6df23..748ea558983 100644 --- a/homeassistant/components/ohme/config_flow.py +++ b/homeassistant/components/ohme/config_flow.py @@ -1,5 +1,6 @@ """Config flow for ohme integration.""" +from collections.abc import Mapping from typing import Any from ohme import ApiException, AuthException, OhmeApiClient @@ -32,6 +33,17 @@ USER_SCHEMA = vol.Schema( } ) +REAUTH_SCHEMA = vol.Schema( + { + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig( + type=TextSelectorType.PASSWORD, + autocomplete="current-password", + ), + ), + } +) + class OhmeConfigFlow(ConfigFlow, domain=DOMAIN): """Config flow.""" @@ -46,14 +58,9 @@ class OhmeConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]}) - instance = 
OhmeApiClient(user_input[CONF_EMAIL], user_input[CONF_PASSWORD]) - try: - await instance.async_login() - except AuthException: - errors["base"] = "invalid_auth" - except ApiException: - errors["base"] = "unknown" - + errors = await self._validate_account( + user_input[CONF_EMAIL], user_input[CONF_PASSWORD] + ) if not errors: return self.async_create_entry( title=user_input[CONF_EMAIL], data=user_input @@ -62,3 +69,48 @@ class OhmeConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", data_schema=USER_SCHEMA, errors=errors ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle re-authentication.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle re-authentication confirmation.""" + errors: dict[str, str] = {} + reauth_entry = self._get_reauth_entry() + if user_input is not None: + errors = await self._validate_account( + reauth_entry.data[CONF_EMAIL], + user_input[CONF_PASSWORD], + ) + if not errors: + return self.async_update_reload_and_abort( + reauth_entry, + data_updates=user_input, + ) + return self.async_show_form( + step_id="reauth_confirm", + data_schema=REAUTH_SCHEMA, + description_placeholders={"email": reauth_entry.data[CONF_EMAIL]}, + errors=errors, + ) + + async def _validate_account(self, email: str, password: str) -> dict[str, str]: + """Validate Ohme account and return dict of errors.""" + errors: dict[str, str] = {} + client = OhmeApiClient( + email, + password, + ) + try: + await client.async_login() + except AuthException: + errors["base"] = "invalid_auth" + except ApiException: + errors["base"] = "unknown" + + return errors diff --git a/homeassistant/components/ohme/manifest.json b/homeassistant/components/ohme/manifest.json index 2d387ce9e8a..c9e1ccf9ac2 100644 --- a/homeassistant/components/ohme/manifest.json +++ 
b/homeassistant/components/ohme/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/ohme/", "integration_type": "device", "iot_class": "cloud_polling", - "quality_scale": "bronze", + "quality_scale": "silver", "requirements": ["ohme==1.1.1"] } diff --git a/homeassistant/components/ohme/quality_scale.yaml b/homeassistant/components/ohme/quality_scale.yaml index 15697cb11a3..7fc2f55e2f9 100644 --- a/homeassistant/components/ohme/quality_scale.yaml +++ b/homeassistant/components/ohme/quality_scale.yaml @@ -40,7 +40,7 @@ rules: integration-owner: done log-when-unavailable: done parallel-updates: done - reauthentication-flow: todo + reauthentication-flow: done test-coverage: done # Gold diff --git a/homeassistant/components/ohme/strings.json b/homeassistant/components/ohme/strings.json index 42e0a60b83e..125babc1901 100644 --- a/homeassistant/components/ohme/strings.json +++ b/homeassistant/components/ohme/strings.json @@ -11,6 +11,16 @@ "email": "Enter the email address associated with your Ohme account.", "password": "Enter the password for your Ohme account" } + }, + "reauth_confirm": { + "description": "Please update your password for {email}", + "title": "[%key:common::config_flow::title::reauth%]", + "data": { + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "Enter the password for your Ohme account" + } } }, "error": { @@ -18,7 +28,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "entity": { diff --git a/tests/components/ohme/test_config_flow.py b/tests/components/ohme/test_config_flow.py index b9d4a10a76e..bb7ecc00bdc 100644 --- a/tests/components/ohme/test_config_flow.py +++ 
b/tests/components/ohme/test_config_flow.py @@ -108,3 +108,77 @@ async def test_already_configured( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_reauth_form(hass: HomeAssistant, mock_client: MagicMock) -> None: + """Test reauth form.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter1", + }, + ) + entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + assert not result["errors"] + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "hunter2"}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + +@pytest.mark.parametrize( + ("test_exception", "expected_error"), + [(AuthException, "invalid_auth"), (ApiException, "unknown")], +) +async def test_reauth_fail( + hass: HomeAssistant, + mock_client: MagicMock, + test_exception: Exception, + expected_error: str, +) -> None: + """Test reauth errors.""" + + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter1", + }, + ) + entry.add_to_hass(hass) + + # Initial form load + result = await entry.start_reauth_flow(hass) + + assert result["step_id"] == "reauth_confirm" + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + # Failed login + mock_client.async_login.side_effect = test_exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "hunter1"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": expected_error} + + # End with success + mock_client.async_login.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: 
"hunter2"}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" From 73e3e91af25d9244ee3a3e5672f1a9ac8837df8d Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 16 Dec 2024 23:54:56 +0100 Subject: [PATCH 324/677] Nord Pool iqs platinum (#133389) --- homeassistant/components/nordpool/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/nordpool/manifest.json b/homeassistant/components/nordpool/manifest.json index b3a18eb040a..215494e10a0 100644 --- a/homeassistant/components/nordpool/manifest.json +++ b/homeassistant/components/nordpool/manifest.json @@ -7,6 +7,7 @@ "integration_type": "hub", "iot_class": "cloud_polling", "loggers": ["pynordpool"], + "quality_scale": "platinum", "requirements": ["pynordpool==0.2.3"], "single_config_entry": true } From 1512cd5fb7a52e11f594caf6723a78396cd749da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ludovic=20BOU=C3=89?= Date: Tue, 17 Dec 2024 00:03:32 +0100 Subject: [PATCH 325/677] Add Matter battery replacement description (#132974) --- homeassistant/components/matter/icons.json | 3 + homeassistant/components/matter/sensor.py | 14 + homeassistant/components/matter/strings.json | 3 + .../matter/snapshots/test_sensor.ambr | 276 ++++++++++++++++++ tests/components/matter/test_sensor.py | 20 ++ 5 files changed, 316 insertions(+) diff --git a/homeassistant/components/matter/icons.json b/homeassistant/components/matter/icons.json index 32c9f057e47..adcdcd05137 100644 --- a/homeassistant/components/matter/icons.json +++ b/homeassistant/components/matter/icons.json @@ -43,6 +43,9 @@ "air_quality": { "default": "mdi:air-filter" }, + "bat_replacement_description": { + "default": "mdi:battery-sync" + }, "hepa_filter_condition": { "default": "mdi:filter-check" }, diff --git a/homeassistant/components/matter/sensor.py b/homeassistant/components/matter/sensor.py index b2a5da2aa71..d71cd52a0c6 100644 --- 
a/homeassistant/components/matter/sensor.py +++ b/homeassistant/components/matter/sensor.py @@ -231,6 +231,20 @@ DISCOVERY_SCHEMAS = [ entity_class=MatterSensor, required_attributes=(clusters.PowerSource.Attributes.BatVoltage,), ), + MatterDiscoverySchema( + platform=Platform.SENSOR, + entity_description=MatterSensorEntityDescription( + key="PowerSourceBatReplacementDescription", + translation_key="battery_replacement_description", + native_unit_of_measurement=None, + device_class=None, + entity_category=EntityCategory.DIAGNOSTIC, + ), + entity_class=MatterSensor, + required_attributes=( + clusters.PowerSource.Attributes.BatReplacementDescription, + ), + ), MatterDiscoverySchema( platform=Platform.SENSOR, entity_description=MatterSensorEntityDescription( diff --git a/homeassistant/components/matter/strings.json b/homeassistant/components/matter/strings.json index 69fa68765b3..ca15538997e 100644 --- a/homeassistant/components/matter/strings.json +++ b/homeassistant/components/matter/strings.json @@ -245,6 +245,9 @@ }, "valve_position": { "name": "Valve position" + }, + "battery_replacement_description": { + "name": "Battery type" } }, "switch": { diff --git a/tests/components/matter/snapshots/test_sensor.ambr b/tests/components/matter/snapshots/test_sensor.ambr index 44ad02d4b1e..60a3d33a130 100644 --- a/tests/components/matter/snapshots/test_sensor.ambr +++ b/tests/components/matter/snapshots/test_sensor.ambr @@ -1145,6 +1145,98 @@ 'state': '189.0', }) # --- +# name: test_sensors[door_lock][sensor.mock_door_lock_battery_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_door_lock_battery_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Battery type', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_replacement_description', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSourceBatReplacementDescription-47-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[door_lock][sensor.mock_door_lock_battery_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Door Lock Battery type', + }), + 'context': , + 'entity_id': 'sensor.mock_door_lock_battery_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '', + }) +# --- +# name: test_sensors[door_lock_with_unbolt][sensor.mock_door_lock_battery_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_door_lock_battery_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Battery type', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_replacement_description', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSourceBatReplacementDescription-47-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[door_lock_with_unbolt][sensor.mock_door_lock_battery_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Door Lock Battery type', + }), + 'context': , + 'entity_id': 'sensor.mock_door_lock_battery_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '', + }) +# --- # name: 
test_sensors[eve_contact_sensor][sensor.eve_door_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1196,6 +1288,52 @@ 'state': '100', }) # --- +# name: test_sensors[eve_contact_sensor][sensor.eve_door_battery_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_door_battery_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Battery type', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_replacement_description', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSourceBatReplacementDescription-47-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[eve_contact_sensor][sensor.eve_door_battery_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Eve Door Battery type', + }), + 'context': , + 'entity_id': 'sensor.eve_door_battery_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '', + }) +# --- # name: test_sensors[eve_contact_sensor][sensor.eve_door_voltage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1733,6 +1871,52 @@ 'state': '100', }) # --- +# name: test_sensors[eve_thermo][sensor.eve_thermo_battery_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_thermo_battery_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Battery type', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_replacement_description', + 'unique_id': '00000000000004D2-0000000000000021-MatterNodeDevice-0-PowerSourceBatReplacementDescription-47-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[eve_thermo][sensor.eve_thermo_battery_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Eve Thermo Battery type', + }), + 'context': , + 'entity_id': 'sensor.eve_thermo_battery_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '', + }) +# --- # name: test_sensors[eve_thermo][sensor.eve_thermo_valve_position-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1882,6 +2066,52 @@ 'state': '100', }) # --- +# name: test_sensors[eve_weather_sensor][sensor.eve_weather_battery_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.eve_weather_battery_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Battery type', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_replacement_description', + 'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-0-PowerSourceBatReplacementDescription-47-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[eve_weather_sensor][sensor.eve_weather_battery_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Eve Weather Battery type', + }), + 'context': , 
+ 'entity_id': 'sensor.eve_weather_battery_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '', + }) +# --- # name: test_sensors[eve_weather_sensor][sensor.eve_weather_humidity-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2735,6 +2965,52 @@ 'state': '94', }) # --- +# name: test_sensors[smoke_detector][sensor.smoke_sensor_battery_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.smoke_sensor_battery_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Battery type', + 'platform': 'matter', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_replacement_description', + 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-PowerSourceBatReplacementDescription-47-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[smoke_detector][sensor.smoke_sensor_battery_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Smoke sensor Battery type', + }), + 'context': , + 'entity_id': 'sensor.smoke_sensor_battery_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'CR123A', + }) +# --- # name: test_sensors[smoke_detector][sensor.smoke_sensor_voltage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/matter/test_sensor.py b/tests/components/matter/test_sensor.py index 27eb7da2c71..3215ec58116 100644 --- a/tests/components/matter/test_sensor.py +++ b/tests/components/matter/test_sensor.py @@ -174,6 +174,26 @@ async def test_battery_sensor_voltage( assert entry.entity_category == EntityCategory.DIAGNOSTIC 
+@pytest.mark.parametrize("node_fixture", ["smoke_detector"]) +async def test_battery_sensor_description( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test battery replacement description sensor.""" + state = hass.states.get("sensor.smoke_sensor_battery_type") + assert state + assert state.state == "CR123A" + + set_node_attribute(matter_node, 1, 47, 19, "CR2032") + await trigger_subscription_callback(hass, matter_client) + + state = hass.states.get("sensor.smoke_sensor_battery_type") + assert state + assert state.state == "CR2032" + + @pytest.mark.parametrize("node_fixture", ["eve_thermo"]) async def test_eve_thermo_sensor( hass: HomeAssistant, From 2d8e693cdbbc5877f130e5e3fdfea859ff08f4b5 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Tue, 17 Dec 2024 07:34:59 +0100 Subject: [PATCH 326/677] Update mypy-dev to 1.14.0a7 (#133390) --- homeassistant/components/image/__init__.py | 2 +- mypy.ini | 1 + requirements_test.txt | 2 +- script/hassfest/mypy_config.py | 1 + 4 files changed, 4 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/image/__init__.py b/homeassistant/components/image/__init__.py index dbb5962eabf..ea235127894 100644 --- a/homeassistant/components/image/__init__.py +++ b/homeassistant/components/image/__init__.py @@ -348,7 +348,7 @@ async def async_get_still_stream( # While this results in additional bandwidth usage, # given the low frequency of image updates, it is acceptable. 
frame.extend(frame) - await response.write(frame) + await response.write(frame) # type: ignore[arg-type] return True event = asyncio.Event() diff --git a/mypy.ini b/mypy.ini index e76bc97585c..15b96e0a802 100644 --- a/mypy.ini +++ b/mypy.ini @@ -10,6 +10,7 @@ show_error_codes = true follow_imports = normal local_partial_types = true strict_equality = true +strict_bytes = true no_implicit_optional = true warn_incomplete_stub = true warn_redundant_casts = true diff --git a/requirements_test.txt b/requirements_test.txt index 50e5957bf96..98a948cd56e 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -12,7 +12,7 @@ coverage==7.6.8 freezegun==1.5.1 license-expression==30.4.0 mock-open==1.4.0 -mypy-dev==1.14.0a6 +mypy-dev==1.14.0a7 pre-commit==4.0.0 pydantic==2.10.3 pylint==3.3.2 diff --git a/script/hassfest/mypy_config.py b/script/hassfest/mypy_config.py index 5767066c943..1d7f2b5ed88 100644 --- a/script/hassfest/mypy_config.py +++ b/script/hassfest/mypy_config.py @@ -47,6 +47,7 @@ GENERAL_SETTINGS: Final[dict[str, str]] = { # Enable some checks globally. 
"local_partial_types": "true", "strict_equality": "true", + "strict_bytes": "true", "no_implicit_optional": "true", "warn_incomplete_stub": "true", "warn_redundant_casts": "true", From fc9d32ef65402e77add31c40bc55bc1e664e6390 Mon Sep 17 00:00:00 2001 From: Vivien Chene Date: Tue, 17 Dec 2024 07:57:43 +0000 Subject: [PATCH 327/677] Fix issue when no data, where the integer sensor value is given a string (#132123) * Fix issue when no data, where the integer sensor value is given a string * Use None and not '0' --- homeassistant/components/irish_rail_transport/sensor.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/irish_rail_transport/sensor.py b/homeassistant/components/irish_rail_transport/sensor.py index 39bf39bcbe0..2765a14b7a3 100644 --- a/homeassistant/components/irish_rail_transport/sensor.py +++ b/homeassistant/components/irish_rail_transport/sensor.py @@ -194,9 +194,9 @@ class IrishRailTransportData: ATTR_STATION: self.station, ATTR_ORIGIN: "", ATTR_DESTINATION: dest, - ATTR_DUE_IN: "n/a", - ATTR_DUE_AT: "n/a", - ATTR_EXPECT_AT: "n/a", + ATTR_DUE_IN: None, + ATTR_DUE_AT: None, + ATTR_EXPECT_AT: None, ATTR_DIRECTION: direction, ATTR_STOPS_AT: stops_at, ATTR_TRAIN_TYPE: "", From 9ca9e787b238df3013e0a29d8a546bc7e9993629 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Tue, 17 Dec 2024 09:07:18 +0100 Subject: [PATCH 328/677] Add tests for Habitica integration (#131780) * Add tests for Habitica integration * update iqs --- .../components/habitica/quality_scale.yaml | 2 +- tests/components/habitica/fixtures/tasks.json | 50 +++++++++++++ tests/components/habitica/fixtures/user.json | 3 +- .../habitica/snapshots/test_calendar.ambr | 24 +++++-- .../habitica/snapshots/test_diagnostics.ambr | 61 ++++++++++++++++ .../habitica/snapshots/test_sensor.ambr | 41 ++++++++++- .../habitica/snapshots/test_todo.ambr | 9 ++- tests/components/habitica/test_button.py | 71 ++++++++++++++++++- 
tests/components/habitica/test_calendar.py | 15 +++- 9 files changed, 266 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/habitica/quality_scale.yaml b/homeassistant/components/habitica/quality_scale.yaml index cf54672bfed..9d505b85b8c 100644 --- a/homeassistant/components/habitica/quality_scale.yaml +++ b/homeassistant/components/habitica/quality_scale.yaml @@ -35,7 +35,7 @@ rules: log-when-unavailable: done parallel-updates: todo reauthentication-flow: todo - test-coverage: todo + test-coverage: done # Gold devices: done diff --git a/tests/components/habitica/fixtures/tasks.json b/tests/components/habitica/fixtures/tasks.json index 7784b9c7f49..a4942063612 100644 --- a/tests/components/habitica/fixtures/tasks.json +++ b/tests/components/habitica/fixtures/tasks.json @@ -532,6 +532,56 @@ "updatedAt": "2024-07-07T17:51:53.266Z", "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", "id": "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b" + }, + { + "repeat": { + "m": false, + "t": false, + "w": false, + "th": false, + "f": false, + "s": false, + "su": true + }, + "challenge": {}, + "group": { + "completedBy": {}, + "assignedUsers": [] + }, + "_id": "6e53f1f5-a315-4edd-984d-8d762e4a08ef", + "frequency": "monthly", + "everyX": 1, + "streak": 1, + "nextDue": [ + "2024-12-14T23:00:00.000Z", + "2025-01-18T23:00:00.000Z", + "2025-02-15T23:00:00.000Z", + "2025-03-15T23:00:00.000Z", + "2025-04-19T23:00:00.000Z", + "2025-05-17T23:00:00.000Z" + ], + "yesterDaily": true, + "history": [], + "completed": false, + "collapseChecklist": false, + "type": "daily", + "text": "Arbeite an einem kreativen Projekt", + "notes": "Klicke um den Namen Deines aktuellen Projekts anzugeben & setze einen Terminplan!", + "tags": [], + "value": -0.9215181434950852, + "priority": 1, + "attribute": "str", + "byHabitica": false, + "startDate": "2024-09-20T23:00:00.000Z", + "daysOfMonth": [], + "weeksOfMonth": [3], + "checklist": [], + "reminders": [], + "createdAt": "2024-10-10T15:57:14.304Z", + 
"updatedAt": "2024-11-27T23:47:29.986Z", + "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", + "isDue": false, + "id": "6e53f1f5-a315-4edd-984d-8d762e4a08ef" } ], "notifications": [ diff --git a/tests/components/habitica/fixtures/user.json b/tests/components/habitica/fixtures/user.json index a498de910ef..ed41a306a03 100644 --- a/tests/components/habitica/fixtures/user.json +++ b/tests/components/habitica/fixtures/user.json @@ -55,7 +55,8 @@ "e97659e0-2c42-4599-a7bb-00282adc410d", "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", "f2c85972-1a19-4426-bc6d-ce3337b9d99f", - "2c6d136c-a1c3-4bef-b7c4-fa980784b1e1" + "2c6d136c-a1c3-4bef-b7c4-fa980784b1e1", + "6e53f1f5-a315-4edd-984d-8d762e4a08ef" ], "habits": ["1d147de6-5c02-4740-8e2f-71d3015a37f4"] }, diff --git a/tests/components/habitica/snapshots/test_calendar.ambr b/tests/components/habitica/snapshots/test_calendar.ambr index c2f9c8e83c9..5e010a33c84 100644 --- a/tests/components/habitica/snapshots/test_calendar.ambr +++ b/tests/components/habitica/snapshots/test_calendar.ambr @@ -1,5 +1,21 @@ # serializer version: 1 -# name: test_api_events[calendar.test_user_dailies] +# name: test_api_events[date range in the past-calendar.test_user_dailies] + list([ + ]) +# --- +# name: test_api_events[date range in the past-calendar.test_user_daily_reminders] + list([ + ]) +# --- +# name: test_api_events[date range in the past-calendar.test_user_to_do_reminders] + list([ + ]) +# --- +# name: test_api_events[date range in the past-calendar.test_user_to_do_s] + list([ + ]) +# --- +# name: test_api_events[default date range-calendar.test_user_dailies] list([ dict({ 'description': 'Klicke um Deinen Terminplan festzulegen!', @@ -577,7 +593,7 @@ }), ]) # --- -# name: test_api_events[calendar.test_user_daily_reminders] +# name: test_api_events[default date range-calendar.test_user_daily_reminders] list([ dict({ 'description': 'Klicke um Deinen Terminplan festzulegen!', @@ -819,7 +835,7 @@ }), ]) # --- -# name: 
test_api_events[calendar.test_user_to_do_reminders] +# name: test_api_events[default date range-calendar.test_user_to_do_reminders] list([ dict({ 'description': 'Strom- und Internetrechnungen rechtzeitig überweisen.', @@ -837,7 +853,7 @@ }), ]) # --- -# name: test_api_events[calendar.test_user_to_do_s] +# name: test_api_events[default date range-calendar.test_user_to_do_s] list([ dict({ 'description': 'Strom- und Internetrechnungen rechtzeitig überweisen.', diff --git a/tests/components/habitica/snapshots/test_diagnostics.ambr b/tests/components/habitica/snapshots/test_diagnostics.ambr index bb9371a4c68..0d5f07d9a6c 100644 --- a/tests/components/habitica/snapshots/test_diagnostics.ambr +++ b/tests/components/habitica/snapshots/test_diagnostics.ambr @@ -615,6 +615,66 @@ 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', 'value': 10, }), + dict({ + '_id': '6e53f1f5-a315-4edd-984d-8d762e4a08ef', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'checklist': list([ + ]), + 'collapseChecklist': False, + 'completed': False, + 'createdAt': '2024-10-10T15:57:14.304Z', + 'daysOfMonth': list([ + ]), + 'everyX': 1, + 'frequency': 'monthly', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'history': list([ + ]), + 'id': '6e53f1f5-a315-4edd-984d-8d762e4a08ef', + 'isDue': False, + 'nextDue': list([ + '2024-12-14T23:00:00.000Z', + '2025-01-18T23:00:00.000Z', + '2025-02-15T23:00:00.000Z', + '2025-03-15T23:00:00.000Z', + '2025-04-19T23:00:00.000Z', + '2025-05-17T23:00:00.000Z', + ]), + 'notes': 'Klicke um den Namen Deines aktuellen Projekts anzugeben & setze einen Terminplan!', + 'priority': 1, + 'reminders': list([ + ]), + 'repeat': dict({ + 'f': False, + 'm': False, + 's': False, + 'su': True, + 't': False, + 'th': False, + 'w': False, + }), + 'startDate': '2024-09-20T23:00:00.000Z', + 'streak': 1, + 'tags': list([ + ]), + 'text': 'Arbeite an einem kreativen Projekt', + 'type': 'daily', + 'updatedAt': 
'2024-11-27T23:47:29.986Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': -0.9215181434950852, + 'weeksOfMonth': list([ + 3, + ]), + 'yesterDaily': True, + }), ]), 'user': dict({ 'api_user': 'test-api-user', @@ -695,6 +755,7 @@ '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + '6e53f1f5-a315-4edd-984d-8d762e4a08ef', ]), 'habits': list([ '1d147de6-5c02-4740-8e2f-71d3015a37f4', diff --git a/tests/components/habitica/snapshots/test_sensor.ambr b/tests/components/habitica/snapshots/test_sensor.ambr index 28dd7eb8c43..7e72d486276 100644 --- a/tests/components/habitica/snapshots/test_sensor.ambr +++ b/tests/components/habitica/snapshots/test_sensor.ambr @@ -226,6 +226,45 @@ 'value': -2.9663035443712333, 'yester_daily': True, }), + '6e53f1f5-a315-4edd-984d-8d762e4a08ef': dict({ + 'created_at': '2024-10-10T15:57:14.304Z', + 'every_x': 1, + 'frequency': 'monthly', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'next_due': list([ + '2024-12-14T23:00:00.000Z', + '2025-01-18T23:00:00.000Z', + '2025-02-15T23:00:00.000Z', + '2025-03-15T23:00:00.000Z', + '2025-04-19T23:00:00.000Z', + '2025-05-17T23:00:00.000Z', + ]), + 'notes': 'Klicke um den Namen Deines aktuellen Projekts anzugeben & setze einen Terminplan!', + 'priority': 1, + 'repeat': dict({ + 'f': False, + 'm': False, + 's': False, + 'su': True, + 't': False, + 'th': False, + 'w': False, + }), + 'start_date': '2024-09-20T23:00:00.000Z', + 'streak': 1, + 'text': 'Arbeite an einem kreativen Projekt', + 'type': 'daily', + 'value': -0.9215181434950852, + 'weeks_of_month': list([ + 3, + ]), + 'yester_daily': True, + }), 'f2c85972-1a19-4426-bc6d-ce3337b9d99f': dict({ 'created_at': '2024-07-07T17:51:53.266Z', 'every_x': 1, @@ -270,7 +309,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '3', + 'state': '4', }) # --- # name: test_sensors[sensor.test_user_display_name-entry] diff 
--git a/tests/components/habitica/snapshots/test_todo.ambr b/tests/components/habitica/snapshots/test_todo.ambr index 79eca9dbbb0..8c49cad5436 100644 --- a/tests/components/habitica/snapshots/test_todo.ambr +++ b/tests/components/habitica/snapshots/test_todo.ambr @@ -42,6 +42,13 @@ 'summary': 'Fitnessstudio besuchen', 'uid': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', }), + dict({ + 'description': 'Klicke um den Namen Deines aktuellen Projekts anzugeben & setze einen Terminplan!', + 'due': '2024-12-14', + 'status': 'needs_action', + 'summary': 'Arbeite an einem kreativen Projekt', + 'uid': '6e53f1f5-a315-4edd-984d-8d762e4a08ef', + }), ]), }), }) @@ -137,7 +144,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2', + 'state': '3', }) # --- # name: test_todos[todo.test_user_to_do_s-entry] diff --git a/tests/components/habitica/test_button.py b/tests/components/habitica/test_button.py index 979cefef923..09cc1c9d373 100644 --- a/tests/components/habitica/test_button.py +++ b/tests/components/habitica/test_button.py @@ -1,6 +1,7 @@ """Tests for Habitica button platform.""" from collections.abc import Generator +from datetime import timedelta from http import HTTPStatus import re from unittest.mock import patch @@ -15,10 +16,16 @@ from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er +import homeassistant.util.dt as dt_util from .conftest import mock_called_with -from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_json_object_fixture, + snapshot_platform, +) from tests.test_util.aiohttp import AiohttpClientMocker @@ -340,3 +347,65 @@ async def test_button_unavailable( for entity_id in entity_ids: assert (state := hass.states.get(entity_id)) 
assert state.state == STATE_UNAVAILABLE + + +async def test_class_change( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test removing and adding skills after class change.""" + mage_skills = [ + "button.test_user_chilling_frost", + "button.test_user_earthquake", + "button.test_user_ethereal_surge", + ] + healer_skills = [ + "button.test_user_healing_light", + "button.test_user_protective_aura", + "button.test_user_searing_brightness", + "button.test_user_blessing", + ] + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", + json=load_json_object_fixture("wizard_fixture.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + params={"type": "completedTodos"}, + json=load_json_object_fixture("completed_todos.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/tasks/user", + json=load_json_object_fixture("tasks.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/content", + params={"language": "en"}, + json=load_json_object_fixture("content.json", DOMAIN), + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + for skill in mage_skills: + assert hass.states.get(skill) + + aioclient_mock._mocks.pop(0) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", + json=load_json_object_fixture("healer_fixture.json", DOMAIN), + ) + + async_fire_time_changed(hass, dt_util.now() + timedelta(seconds=60)) + await hass.async_block_till_done() + + for skill in mage_skills: + assert not hass.states.get(skill) + + for skill in healer_skills: + assert hass.states.get(skill) diff --git a/tests/components/habitica/test_calendar.py b/tests/components/habitica/test_calendar.py index a6cdb1a9306..ff3ffbeb80d 100644 --- a/tests/components/habitica/test_calendar.py 
+++ b/tests/components/habitica/test_calendar.py @@ -59,6 +59,17 @@ async def test_calendar_platform( "calendar.test_user_to_do_reminders", ], ) +@pytest.mark.parametrize( + ("start_date", "end_date"), + [ + ("2024-08-29", "2024-10-08"), + ("2023-08-01", "2023-08-02"), + ], + ids=[ + "default date range", + "date range in the past", + ], +) @pytest.mark.freeze_time("2024-09-20T22:00:00.000Z") @pytest.mark.usefixtures("mock_habitica") async def test_api_events( @@ -67,6 +78,8 @@ async def test_api_events( config_entry: MockConfigEntry, hass_client: ClientSessionGenerator, entity: str, + start_date: str, + end_date: str, ) -> None: """Test calendar event.""" @@ -76,7 +89,7 @@ async def test_api_events( client = await hass_client() response = await client.get( - f"/api/calendars/{entity}?start=2024-08-29&end=2024-10-08" + f"/api/calendars/{entity}?start={start_date}&end={end_date}" ) assert await response.json() == snapshot From ac6d7180949358d8f8708ae4a903312ca0bb739d Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Tue, 17 Dec 2024 09:37:46 +0100 Subject: [PATCH 329/677] Fix mqtt reconfigure flow (#133315) * FIx mqtt reconfigure flow * Follow up on code review --- homeassistant/components/mqtt/config_flow.py | 17 ++++------- tests/components/mqtt/test_config_flow.py | 32 +++++--------------- 2 files changed, 13 insertions(+), 36 deletions(-) diff --git a/homeassistant/components/mqtt/config_flow.py b/homeassistant/components/mqtt/config_flow.py index ad3f3d35457..0081246c705 100644 --- a/homeassistant/components/mqtt/config_flow.py +++ b/homeassistant/components/mqtt/config_flow.py @@ -470,7 +470,6 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} fields: OrderedDict[Any, Any] = OrderedDict() validated_user_input: dict[str, Any] = {} - broker_config: dict[str, Any] = {} if is_reconfigure := (self.source == SOURCE_RECONFIGURE): reconfigure_entry = self._get_reconfigure_entry() if await async_get_broker_settings( @@ -482,29 +481,25 @@ class 
FlowHandler(ConfigFlow, domain=DOMAIN): errors, ): if is_reconfigure: - broker_config.update( - update_password_from_user_input( - reconfigure_entry.data.get(CONF_PASSWORD), validated_user_input - ), + update_password_from_user_input( + reconfigure_entry.data.get(CONF_PASSWORD), validated_user_input ) - else: - broker_config = validated_user_input can_connect = await self.hass.async_add_executor_job( try_connection, - broker_config, + validated_user_input, ) if can_connect: if is_reconfigure: return self.async_update_reload_and_abort( reconfigure_entry, - data_updates=broker_config, + data=validated_user_input, ) validated_user_input[CONF_DISCOVERY] = DEFAULT_DISCOVERY return self.async_create_entry( - title=broker_config[CONF_BROKER], - data=broker_config, + title=validated_user_input[CONF_BROKER], + data=validated_user_input, ) errors["base"] = "cannot_connect" diff --git a/tests/components/mqtt/test_config_flow.py b/tests/components/mqtt/test_config_flow.py index fc1221956de..38dbda50cdd 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -2162,7 +2162,7 @@ async def test_setup_with_advanced_settings( async def test_change_websockets_transport_to_tcp( hass: HomeAssistant, mock_try_connection: MagicMock ) -> None: - """Test option flow setup with websockets transport settings.""" + """Test reconfiguration flow changing websockets transport settings.""" config_entry = MockConfigEntry(domain=mqtt.DOMAIN) config_entry.add_to_hass(hass) hass.config_entries.async_update_entry( @@ -2178,7 +2178,7 @@ async def test_change_websockets_transport_to_tcp( mock_try_connection.return_value = True - result = await hass.config_entries.options.async_init(config_entry.entry_id) + result = await config_entry.start_reconfigure_flow(hass, show_advanced_options=True) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "broker" assert result["data_schema"].schema["transport"] @@ -2186,7 +2186,7 @@ async def 
test_change_websockets_transport_to_tcp( assert result["data_schema"].schema["ws_headers"] # Change transport to tcp - result = await hass.config_entries.options.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ mqtt.CONF_BROKER: "test-broker", @@ -2196,25 +2196,14 @@ async def test_change_websockets_transport_to_tcp( mqtt.CONF_WS_PATH: "/some_path", }, ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "options" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - mqtt.CONF_DISCOVERY: True, - mqtt.CONF_DISCOVERY_PREFIX: "homeassistant_test", - }, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" # Check config entry result assert config_entry.data == { mqtt.CONF_BROKER: "test-broker", CONF_PORT: 1234, mqtt.CONF_TRANSPORT: "tcp", - mqtt.CONF_DISCOVERY: True, - mqtt.CONF_DISCOVERY_PREFIX: "homeassistant_test", } @@ -2238,15 +2227,8 @@ async def test_reconfigure_flow_form( ) -> None: """Test reconfigure flow.""" await mqtt_mock_entry() - entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] - result = await hass.config_entries.flow.async_init( - mqtt.DOMAIN, - context={ - "source": config_entries.SOURCE_RECONFIGURE, - "entry_id": entry.entry_id, - "show_advanced_options": True, - }, - ) + entry: MockConfigEntry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + result = await entry.start_reconfigure_flow(hass, show_advanced_options=True) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "broker" assert result["errors"] == {} From c0264f73b0cbf6e6d582c983e4e92583cb136c1b Mon Sep 17 00:00:00 2001 From: dotvav Date: Tue, 17 Dec 2024 10:17:50 +0100 Subject: [PATCH 330/677] Add palazzetti status sensor (#131348) * Add status sensor * Lower the case of strings keys * Make const Final * Fix typo * Fix 
typo * Merge similar statuses * Increase readability * Update snapshot --- homeassistant/components/palazzetti/const.py | 52 +++++++ homeassistant/components/palazzetti/sensor.py | 19 ++- .../components/palazzetti/strings.json | 36 +++++ tests/components/palazzetti/conftest.py | 1 + .../palazzetti/snapshots/test_sensor.ambr | 146 ++++++++++++++++++ 5 files changed, 253 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/palazzetti/const.py b/homeassistant/components/palazzetti/const.py index 4cb8b1f14a6..b2e27b2a6fd 100644 --- a/homeassistant/components/palazzetti/const.py +++ b/homeassistant/components/palazzetti/const.py @@ -4,6 +4,8 @@ from datetime import timedelta import logging from typing import Final +from homeassistant.helpers.typing import StateType + DOMAIN: Final = "palazzetti" PALAZZETTI: Final = "Palazzetti" LOGGER = logging.getLogger(__package__) @@ -17,3 +19,53 @@ FAN_SILENT: Final = "silent" FAN_HIGH: Final = "high" FAN_AUTO: Final = "auto" FAN_MODES: Final = [FAN_SILENT, "1", "2", "3", "4", "5", FAN_HIGH, FAN_AUTO] + +STATUS_TO_HA: Final[dict[StateType, str]] = { + 0: "off", + 1: "off_timer", + 2: "test_fire", + 3: "heatup", + 4: "fueling", + 5: "ign_test", + 6: "burning", + 7: "burning_mod", + 8: "unknown", + 9: "cool_fluid", + 10: "fire_stop", + 11: "clean_fire", + 12: "cooling", + 50: "cleanup", + 51: "ecomode", + 241: "chimney_alarm", + 243: "grate_error", + 244: "pellet_water_error", + 245: "t05_error", + 247: "hatch_door_open", + 248: "pressure_error", + 249: "main_probe_failure", + 250: "flue_probe_failure", + 252: "exhaust_temp_high", + 253: "pellet_finished", + 501: "off", + 502: "fueling", + 503: "ign_test", + 504: "burning", + 505: "firewood_finished", + 506: "cooling", + 507: "clean_fire", + 1000: "general_error", + 1001: "general_error", + 1239: "door_open", + 1240: "temp_too_high", + 1241: "cleaning_warning", + 1243: "fuel_error", + 1244: "pellet_water_error", + 1245: "t05_error", + 1247: "hatch_door_open", + 1248: 
"pressure_error", + 1249: "main_probe_failure", + 1250: "flue_probe_failure", + 1252: "exhaust_temp_high", + 1253: "pellet_finished", + 1508: "general_error", +} diff --git a/homeassistant/components/palazzetti/sensor.py b/homeassistant/components/palazzetti/sensor.py index ead2b236b17..11462201f4e 100644 --- a/homeassistant/components/palazzetti/sensor.py +++ b/homeassistant/components/palazzetti/sensor.py @@ -14,6 +14,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from . import PalazzettiConfigEntry +from .const import STATUS_TO_HA from .coordinator import PalazzettiDataUpdateCoordinator from .entity import PalazzettiEntity @@ -23,10 +24,19 @@ class PropertySensorEntityDescription(SensorEntityDescription): """Describes a Palazzetti sensor entity that is read from a `PalazzettiClient` property.""" client_property: str + property_map: dict[StateType, str] | None = None presence_flag: None | str = None PROPERTY_SENSOR_DESCRIPTIONS: list[PropertySensorEntityDescription] = [ + PropertySensorEntityDescription( + key="status", + device_class=SensorDeviceClass.ENUM, + translation_key="status", + client_property="status", + property_map=STATUS_TO_HA, + options=list(STATUS_TO_HA.values()), + ), PropertySensorEntityDescription( key="pellet_quantity", device_class=SensorDeviceClass.WEIGHT, @@ -103,4 +113,11 @@ class PalazzettiSensor(PalazzettiEntity, SensorEntity): def native_value(self) -> StateType: """Return the state value of the sensor.""" - return getattr(self.coordinator.client, self.entity_description.client_property) + raw_value = getattr( + self.coordinator.client, self.entity_description.client_property + ) + + if self.entity_description.property_map: + return self.entity_description.property_map[raw_value] + + return raw_value diff --git a/homeassistant/components/palazzetti/strings.json b/homeassistant/components/palazzetti/strings.json index 60c6e20c402..ad7bc498bd1 100644 --- 
a/homeassistant/components/palazzetti/strings.json +++ b/homeassistant/components/palazzetti/strings.json @@ -57,6 +57,42 @@ } }, "sensor": { + "status": { + "name": "Status", + "state": { + "off": "Off", + "off_timer": "Timer-regulated switch off", + "test_fire": "Ignition test", + "heatup": "Pellet feed", + "fueling": "Ignition", + "ign_test": "Fuel check", + "burning": "Operating", + "burning_mod": "Operating - Modulating", + "unknown": "Unknown", + "cool_fluid": "Stand-by", + "fire_stop": "Switch off", + "clean_fire": "Burn pot cleaning", + "cooling": "Cooling in progress", + "cleanup": "Final cleaning", + "ecomode": "Ecomode", + "chimney_alarm": "Chimney alarm", + "grate_error": "Grate error", + "pellet_water_error": "Pellet probe or return water error", + "t05_error": "T05 error disconnected or faulty probe", + "hatch_door_open": "Feed hatch or door open", + "pressure_error": "Safety pressure switch error", + "main_probe_failure": "Main probe failure", + "flue_probe_failure": "Flue gas probe failure", + "exhaust_temp_high": "Too high exhaust gas temperature", + "pellet_finished": "Pellets finished or ignition failed", + "firewood_finished": "Firewood finished", + "general_error": "General error", + "door_open": "Door open", + "temp_too_high": "Temperature too high", + "cleaning_warning": "Cleaning warning", + "fuel_error": "Fuel error" + } + }, "pellet_quantity": { "name": "Pellet quantity" }, diff --git a/tests/components/palazzetti/conftest.py b/tests/components/palazzetti/conftest.py index a9f76b259c3..fad535df914 100644 --- a/tests/components/palazzetti/conftest.py +++ b/tests/components/palazzetti/conftest.py @@ -66,6 +66,7 @@ def mock_palazzetti_client() -> Generator[AsyncMock]: mock_client.has_on_off_switch = True mock_client.has_pellet_level = False mock_client.connected = True + mock_client.status = 6 mock_client.is_heating = True mock_client.room_temperature = 18 mock_client.T1 = 21.5 diff --git 
a/tests/components/palazzetti/snapshots/test_sensor.ambr b/tests/components/palazzetti/snapshots/test_sensor.ambr index 107b818f195..aa98f3a4f59 100644 --- a/tests/components/palazzetti/snapshots/test_sensor.ambr +++ b/tests/components/palazzetti/snapshots/test_sensor.ambr @@ -305,6 +305,152 @@ 'state': '21.5', }) # --- +# name: test_all_entities[sensor.stove_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'off_timer', + 'test_fire', + 'heatup', + 'fueling', + 'ign_test', + 'burning', + 'burning_mod', + 'unknown', + 'cool_fluid', + 'fire_stop', + 'clean_fire', + 'cooling', + 'cleanup', + 'ecomode', + 'chimney_alarm', + 'grate_error', + 'pellet_water_error', + 't05_error', + 'hatch_door_open', + 'pressure_error', + 'main_probe_failure', + 'flue_probe_failure', + 'exhaust_temp_high', + 'pellet_finished', + 'off', + 'fueling', + 'ign_test', + 'burning', + 'firewood_finished', + 'cooling', + 'clean_fire', + 'general_error', + 'general_error', + 'door_open', + 'temp_too_high', + 'cleaning_warning', + 'fuel_error', + 'pellet_water_error', + 't05_error', + 'hatch_door_open', + 'pressure_error', + 'main_probe_failure', + 'flue_probe_failure', + 'exhaust_temp_high', + 'pellet_finished', + 'general_error', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stove_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status', + 'unique_id': '11:22:33:44:55:66-status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.stove_status-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Stove Status', + 'options': list([ + 'off', + 'off_timer', + 'test_fire', + 'heatup', + 'fueling', + 'ign_test', + 'burning', + 'burning_mod', + 'unknown', + 'cool_fluid', + 'fire_stop', + 'clean_fire', + 'cooling', + 'cleanup', + 'ecomode', + 'chimney_alarm', + 'grate_error', + 'pellet_water_error', + 't05_error', + 'hatch_door_open', + 'pressure_error', + 'main_probe_failure', + 'flue_probe_failure', + 'exhaust_temp_high', + 'pellet_finished', + 'off', + 'fueling', + 'ign_test', + 'burning', + 'firewood_finished', + 'cooling', + 'clean_fire', + 'general_error', + 'general_error', + 'door_open', + 'temp_too_high', + 'cleaning_warning', + 'fuel_error', + 'pellet_water_error', + 't05_error', + 'hatch_door_open', + 'pressure_error', + 'main_probe_failure', + 'flue_probe_failure', + 'exhaust_temp_high', + 'pellet_finished', + 'general_error', + ]), + }), + 'context': , + 'entity_id': 'sensor.stove_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'burning', + }) +# --- # name: test_all_entities[sensor.stove_tank_water_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ From d8e853941a3847db28c518f48df7331e403a9956 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 17 Dec 2024 11:10:38 +0100 Subject: [PATCH 331/677] Bump holidays to 0.63 (#133391) --- homeassistant/components/holiday/manifest.json | 2 +- homeassistant/components/workday/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/holiday/manifest.json b/homeassistant/components/holiday/manifest.json index 7edc140da11..33cae231595 100644 --- a/homeassistant/components/holiday/manifest.json +++ b/homeassistant/components/holiday/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/holiday", "iot_class": "local_polling", 
- "requirements": ["holidays==0.62", "babel==2.15.0"] + "requirements": ["holidays==0.63", "babel==2.15.0"] } diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index 842c6f1f1ad..de9cbe694d8 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": ["holidays==0.62"] + "requirements": ["holidays==0.63"] } diff --git a/requirements_all.txt b/requirements_all.txt index c4e9529c6c8..f2ab0a938d9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1131,7 +1131,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.62 +holidays==0.63 # homeassistant.components.frontend home-assistant-frontend==20241127.8 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 056d7422195..c6631388041 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -960,7 +960,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.62 +holidays==0.63 # homeassistant.components.frontend home-assistant-frontend==20241127.8 From 0dbd5bffe6014f7c5f613acf71962f1fc4b8548e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 17 Dec 2024 11:26:51 +0100 Subject: [PATCH 332/677] Fix incorrect schema in config tests (#133404) --- tests/components/config/test_config_entries.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/components/config/test_config_entries.py b/tests/components/config/test_config_entries.py index 4d37f3c871b..0a1ffbe87b3 100644 --- a/tests/components/config/test_config_entries.py +++ b/tests/components/config/test_config_entries.py @@ -415,7 +415,7 @@ async def test_initialize_flow(hass: HomeAssistant, client: TestClient) -> None: return 
self.async_show_form( step_id="user", - data_schema=schema, + data_schema=vol.Schema(schema), description_placeholders={ "url": "https://example.com", "show_advanced_options": self.show_advanced_options, @@ -804,7 +804,7 @@ async def test_get_progress_flow(hass: HomeAssistant, client: TestClient) -> Non return self.async_show_form( step_id="user", - data_schema=schema, + data_schema=vol.Schema(schema), errors={"username": "Should be unique."}, ) @@ -842,7 +842,7 @@ async def test_get_progress_flow_unauth( return self.async_show_form( step_id="user", - data_schema=schema, + data_schema=vol.Schema(schema), errors={"username": "Should be unique."}, ) @@ -874,7 +874,7 @@ async def test_options_flow(hass: HomeAssistant, client: TestClient) -> None: schema[vol.Required("enabled")] = bool return self.async_show_form( step_id="user", - data_schema=schema, + data_schema=vol.Schema(schema), description_placeholders={"enabled": "Set to true to be true"}, ) From 084ef206955a428106c347cc6d60bf3b83a0c080 Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Tue, 17 Dec 2024 11:33:04 +0100 Subject: [PATCH 333/677] Add quality_scale.yaml to enphase_envoy (#132489) --- .../components/enphase_envoy/config_flow.py | 3 +- .../enphase_envoy/quality_scale.yaml | 124 ++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 3 files changed, 126 insertions(+), 2 deletions(-) create mode 100644 homeassistant/components/enphase_envoy/quality_scale.yaml diff --git a/homeassistant/components/enphase_envoy/config_flow.py b/homeassistant/components/enphase_envoy/config_flow.py index 23c769293c8..70ba3570e91 100644 --- a/homeassistant/components/enphase_envoy/config_flow.py +++ b/homeassistant/components/enphase_envoy/config_flow.py @@ -31,6 +31,7 @@ from .const import ( OPTION_DISABLE_KEEP_ALIVE, OPTION_DISABLE_KEEP_ALIVE_DEFAULT_VALUE, ) +from .coordinator import EnphaseConfigEntry _LOGGER = logging.getLogger(__name__) @@ -67,7 +68,7 @@ class 
EnphaseConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: EnphaseConfigEntry, ) -> EnvoyOptionsFlowHandler: """Options flow handler for Enphase_Envoy.""" return EnvoyOptionsFlowHandler() diff --git a/homeassistant/components/enphase_envoy/quality_scale.yaml b/homeassistant/components/enphase_envoy/quality_scale.yaml new file mode 100644 index 00000000000..c4077b8df67 --- /dev/null +++ b/homeassistant/components/enphase_envoy/quality_scale.yaml @@ -0,0 +1,124 @@ +rules: + # Bronze + action-setup: + status: done + comment: only actions implemented are platform native ones. + appropriate-polling: + status: done + comment: fixed 1 minute cycle based on Enphase Envoy device characteristics + brands: done + common-modules: + status: done + comment: | + In coordinator.py, you set self.entry = entry, while after the super constructor, + you can access the entry via self.config_entry (you would have to overwrite the + type to make sure you don't have to assert not None every time)done + config-flow-test-coverage: + status: todo + comment: | + - test_form is missing an assertion for the unique id of the resulting entry + - Let's also have test_user_no_serial_number assert the unique_id (as in, it can't be set to the serial_number since we dont have one, so let's assert what it will result in) + - Let's have every test result in either CREATE_ENTRY or ABORT (like test_form_invalid_auth or test_form_cannot_connect, they can be parametrized) + - test_zeroconf_token_firmware and test_zeroconf_pre_token_firmware can also be parametrized I think + - test_zero_conf_malformed_serial_property - with pytest.raises(KeyError) as ex:: + I don't believe this should be able to raise a KeyError Shouldn't we abort the flow? 
+ test_reauth -> Let's also assert result before we start with the async_configure part + config-flow: + status: todo + comment: | + - async_step_zeroconf -> a config entry title is considered userland, + so if someone renamed their entry, it will be reverted back with the code at L146. + - async_step_reaut L160: I believe that the unique is already set when starting a reauth flow + - The config flow is missing data descriptions for the other fields + dependency-transparency: done + docs-actions: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy/#actions + docs-high-level-description: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy + docs-installation-instructions: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#prerequisites + docs-removal-instructions: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#removing-the-integration + entity-event-setup: + status: done + comment: no events used. + entity-unique-id: done + has-entity-name: done + runtime-data: + status: done + comment: | + async_unload_entry- coordinator: EnphaseUpdateCoordinator = entry.runtime_data + You can remove the EnphaseUpdateCoordinator as the type can now be inferred thanks to the typed config entry + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: todo + comment: | + needs to raise appropriate error when exception occurs. 
+ Pending https://github.com/pyenphase/pyenphase/pull/194 + config-entry-unloading: done + docs-configuration-parameters: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#configuration + docs-installation-parameters: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#required-manual-input + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: + status: done + comment: pending https://github.com/home-assistant/core/pull/132373 + reauthentication-flow: done + test-coverage: + status: todo + comment: | + - test_config_different_unique_id -> unique_id set to the mock config entry is an int, not a str + - Apart from the coverage, test_option_change_reload does not verify that the config entry is reloaded + + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#data-updates + docs-examples: + status: todo + comment: add blue-print examples, if any + docs-known-limitations: todo + docs-supported-devices: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#supported-devices + docs-supported-functions: todo + docs-troubleshooting: + status: done + comment: https://www.home-assistant.io/integrations/enphase_envoy#troubleshooting + docs-use-cases: todo + dynamic-devices: todo + entity-category: todo + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: + status: todo + comment: pending https://github.com/home-assistant/core/pull/132483 + icon-translations: todo + reconfiguration-flow: done + repair-issues: + status: exempt + comment: no general issues or repair.py + stale-devices: todo + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/script/hassfest/quality_scale.py 
b/script/hassfest/quality_scale.py index 5ad3467dd79..83335fa5c44 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -355,7 +355,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "energyzero", "enigma2", "enocean", - "enphase_envoy", "entur_public_transport", "environment_canada", "envisalink", From ce0117b2b82cda7900f18d781be2d6a8d0f807ed Mon Sep 17 00:00:00 2001 From: Jonas Fors Lellky Date: Tue, 17 Dec 2024 11:36:45 +0100 Subject: [PATCH 334/677] Fix fan setpoints for flexit_bacnet (#133388) --- .../components/flexit_bacnet/number.py | 52 ++++++++------ tests/components/flexit_bacnet/conftest.py | 20 +++--- .../flexit_bacnet/snapshots/test_number.ambr | 68 +++++++++---------- tests/components/flexit_bacnet/test_number.py | 8 +-- 4 files changed, 80 insertions(+), 68 deletions(-) diff --git a/homeassistant/components/flexit_bacnet/number.py b/homeassistant/components/flexit_bacnet/number.py index 6e6e2eea980..029ce896445 100644 --- a/homeassistant/components/flexit_bacnet/number.py +++ b/homeassistant/components/flexit_bacnet/number.py @@ -29,6 +29,8 @@ class FlexitNumberEntityDescription(NumberEntityDescription): """Describes a Flexit number entity.""" native_value_fn: Callable[[FlexitBACnet], float] + native_max_value_fn: Callable[[FlexitBACnet], int] + native_min_value_fn: Callable[[FlexitBACnet], int] set_native_value_fn: Callable[[FlexitBACnet], Callable[[int], Awaitable[None]]] @@ -37,121 +39,121 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] 
= ( key="away_extract_fan_setpoint", translation_key="away_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_extract_air_away, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_away, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda device: int(device.fan_setpoint_extract_air_home), + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="away_supply_fan_setpoint", translation_key="away_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_away, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_away, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda device: int(device.fan_setpoint_supply_air_home), + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="cooker_hood_extract_fan_setpoint", translation_key="cooker_hood_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_extract_air_cooker, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_cooker, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="cooker_hood_supply_fan_setpoint", translation_key="cooker_hood_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_cooker, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_cooker, 
native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="fireplace_extract_fan_setpoint", translation_key="fireplace_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_extract_air_fire, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_fire, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="fireplace_supply_fan_setpoint", translation_key="fireplace_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_fire, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_fire, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="high_extract_fan_setpoint", translation_key="high_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_extract_air_high, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_high, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda device: int(device.fan_setpoint_extract_air_home), ), FlexitNumberEntityDescription( key="high_supply_fan_setpoint", translation_key="high_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_high, 
set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_high, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda device: int(device.fan_setpoint_supply_air_home), ), FlexitNumberEntityDescription( key="home_extract_fan_setpoint", translation_key="home_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_extract_air_home, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_home, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda device: int(device.fan_setpoint_extract_air_away), ), FlexitNumberEntityDescription( key="home_supply_fan_setpoint", translation_key="home_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_home, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_home, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda device: int(device.fan_setpoint_supply_air_away), ), ) @@ -192,6 +194,16 @@ class FlexitNumber(FlexitEntity, NumberEntity): """Return the state of the number.""" return self.entity_description.native_value_fn(self.coordinator.device) + @property + def native_max_value(self) -> float: + """Return the native max value of the number.""" + return self.entity_description.native_max_value_fn(self.coordinator.device) + + @property + def native_min_value(self) -> float: + """Return the native min value of the number.""" + return self.entity_description.native_min_value_fn(self.coordinator.device) + async def async_set_native_value(self, value: float) -> None: """Update the current value.""" set_native_value_fn = 
self.entity_description.set_native_value_fn( diff --git a/tests/components/flexit_bacnet/conftest.py b/tests/components/flexit_bacnet/conftest.py index a6205bac506..6ce17261bfc 100644 --- a/tests/components/flexit_bacnet/conftest.py +++ b/tests/components/flexit_bacnet/conftest.py @@ -69,16 +69,16 @@ def mock_flexit_bacnet() -> Generator[AsyncMock]: flexit_bacnet.electric_heater = True # Mock fan setpoints - flexit_bacnet.fan_setpoint_extract_air_fire = 10 - flexit_bacnet.fan_setpoint_supply_air_fire = 20 - flexit_bacnet.fan_setpoint_extract_air_away = 30 - flexit_bacnet.fan_setpoint_supply_air_away = 40 - flexit_bacnet.fan_setpoint_extract_air_home = 50 - flexit_bacnet.fan_setpoint_supply_air_home = 60 - flexit_bacnet.fan_setpoint_extract_air_high = 70 - flexit_bacnet.fan_setpoint_supply_air_high = 80 - flexit_bacnet.fan_setpoint_extract_air_cooker = 90 - flexit_bacnet.fan_setpoint_supply_air_cooker = 100 + flexit_bacnet.fan_setpoint_extract_air_fire = 56 + flexit_bacnet.fan_setpoint_supply_air_fire = 77 + flexit_bacnet.fan_setpoint_extract_air_away = 40 + flexit_bacnet.fan_setpoint_supply_air_away = 42 + flexit_bacnet.fan_setpoint_extract_air_home = 70 + flexit_bacnet.fan_setpoint_supply_air_home = 74 + flexit_bacnet.fan_setpoint_extract_air_high = 100 + flexit_bacnet.fan_setpoint_supply_air_high = 100 + flexit_bacnet.fan_setpoint_extract_air_cooker = 50 + flexit_bacnet.fan_setpoint_supply_air_cooker = 70 yield flexit_bacnet diff --git a/tests/components/flexit_bacnet/snapshots/test_number.ambr b/tests/components/flexit_bacnet/snapshots/test_number.ambr index c4fb1e7c434..78eefd08345 100644 --- a/tests/components/flexit_bacnet/snapshots/test_number.ambr +++ b/tests/components/flexit_bacnet/snapshots/test_number.ambr @@ -5,8 +5,8 @@ }), 'area_id': None, 'capabilities': dict({ - 'max': 100, - 'min': 0, + 'max': 70, + 'min': 30, 'mode': , 'step': 1, }), @@ -42,8 +42,8 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Device Name Away 
extract fan setpoint', - 'max': 100, - 'min': 0, + 'max': 70, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -53,7 +53,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '30', + 'state': '40', }) # --- # name: test_numbers[number.device_name_away_supply_fan_setpoint-entry] @@ -62,8 +62,8 @@ }), 'area_id': None, 'capabilities': dict({ - 'max': 100, - 'min': 0, + 'max': 74, + 'min': 30, 'mode': , 'step': 1, }), @@ -99,8 +99,8 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Device Name Away supply fan setpoint', - 'max': 100, - 'min': 0, + 'max': 74, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -110,7 +110,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '40', + 'state': '42', }) # --- # name: test_numbers[number.device_name_cooker_hood_extract_fan_setpoint-entry] @@ -120,7 +120,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, }), @@ -157,7 +157,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Cooker hood extract fan setpoint', 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -167,7 +167,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '90', + 'state': '50', }) # --- # name: test_numbers[number.device_name_cooker_hood_supply_fan_setpoint-entry] @@ -177,7 +177,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, }), @@ -214,7 +214,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Cooker hood supply fan setpoint', 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -224,7 +224,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100', + 'state': '70', }) # --- # name: test_numbers[number.device_name_fireplace_extract_fan_setpoint-entry] @@ -234,7 +234,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 
100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, }), @@ -271,7 +271,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Fireplace extract fan setpoint', 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -281,7 +281,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '10', + 'state': '56', }) # --- # name: test_numbers[number.device_name_fireplace_supply_fan_setpoint-entry] @@ -291,7 +291,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, }), @@ -328,7 +328,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Fireplace supply fan setpoint', 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -338,7 +338,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '20', + 'state': '77', }) # --- # name: test_numbers[number.device_name_high_extract_fan_setpoint-entry] @@ -348,7 +348,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 70, 'mode': , 'step': 1, }), @@ -385,7 +385,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name High extract fan setpoint', 'max': 100, - 'min': 0, + 'min': 70, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -395,7 +395,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '70', + 'state': '100', }) # --- # name: test_numbers[number.device_name_high_supply_fan_setpoint-entry] @@ -405,7 +405,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 74, 'mode': , 'step': 1, }), @@ -442,7 +442,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name High supply fan setpoint', 'max': 100, - 'min': 0, + 'min': 74, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -452,7 +452,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '80', + 'state': '100', }) # --- # name: test_numbers[number.device_name_home_extract_fan_setpoint-entry] @@ -462,7 +462,7 
@@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 40, 'mode': , 'step': 1, }), @@ -499,7 +499,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Home extract fan setpoint', 'max': 100, - 'min': 0, + 'min': 40, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -509,7 +509,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '50', + 'state': '70', }) # --- # name: test_numbers[number.device_name_home_supply_fan_setpoint-entry] @@ -519,7 +519,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 42, 'mode': , 'step': 1, }), @@ -556,7 +556,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Home supply fan setpoint', 'max': 100, - 'min': 0, + 'min': 42, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -566,6 +566,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '60', + 'state': '74', }) # --- diff --git a/tests/components/flexit_bacnet/test_number.py b/tests/components/flexit_bacnet/test_number.py index ad49908fa96..f566b623f12 100644 --- a/tests/components/flexit_bacnet/test_number.py +++ b/tests/components/flexit_bacnet/test_number.py @@ -64,21 +64,21 @@ async def test_numbers_implementation( assert len(mocked_method.mock_calls) == 1 assert hass.states.get(ENTITY_ID).state == "60" - mock_flexit_bacnet.fan_setpoint_supply_air_fire = 10 + mock_flexit_bacnet.fan_setpoint_supply_air_fire = 40 await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, { ATTR_ENTITY_ID: ENTITY_ID, - ATTR_VALUE: 10, + ATTR_VALUE: 40, }, blocking=True, ) mocked_method = getattr(mock_flexit_bacnet, "set_fan_setpoint_supply_air_fire") assert len(mocked_method.mock_calls) == 2 - assert hass.states.get(ENTITY_ID).state == "10" + assert hass.states.get(ENTITY_ID).state == "40" # Error recovery, when setting the value mock_flexit_bacnet.set_fan_setpoint_supply_air_fire.side_effect = DecodingError @@ -89,7 +89,7 @@ async def test_numbers_implementation( 
SERVICE_SET_VALUE, { ATTR_ENTITY_ID: ENTITY_ID, - ATTR_VALUE: 10, + ATTR_VALUE: 40, }, blocking=True, ) From 991864b38c9b145a91763addef3ade6a2fc89708 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 17 Dec 2024 12:02:53 +0100 Subject: [PATCH 335/677] Fix schema translation checks for nested config-flow sections (#133392) --- tests/components/conftest.py | 94 +++++++++++++++++++++++++----------- 1 file changed, 66 insertions(+), 28 deletions(-) diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 3828cc5ff37..e95147b8664 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -19,6 +19,7 @@ from aiohasupervisor.models import ( StoreInfo, ) import pytest +import voluptuous as vol from homeassistant.components import repairs from homeassistant.config_entries import ( @@ -34,6 +35,7 @@ from homeassistant.data_entry_flow import ( FlowHandler, FlowManager, FlowResultType, + section, ) from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import issue_registry as ir @@ -644,6 +646,61 @@ def _get_integration_quality_scale_rule(integration: str, rule: str) -> str: return status if isinstance(status, str) else status["status"] +async def _check_step_or_section_translations( + hass: HomeAssistant, + translation_errors: dict[str, str], + category: str, + integration: str, + translation_prefix: str, + description_placeholders: dict[str, str], + data_schema: vol.Schema | None, +) -> None: + # neither title nor description are required + # - title defaults to integration name + # - description is optional + for header in ("title", "description"): + await _validate_translation( + hass, + translation_errors, + category, + integration, + f"{translation_prefix}.{header}", + description_placeholders, + translation_required=False, + ) + + if not data_schema: + return + + for data_key, data_value in data_schema.schema.items(): + if isinstance(data_value, section): + # 
check the nested section + await _check_step_or_section_translations( + hass, + translation_errors, + category, + integration, + f"{translation_prefix}.sections.{data_key}", + description_placeholders, + data_value.schema, + ) + return + iqs_config_flow = _get_integration_quality_scale_rule( + integration, "config-flow" + ) + # data and data_description are compulsory + for header in ("data", "data_description"): + await _validate_translation( + hass, + translation_errors, + category, + integration, + f"{translation_prefix}.{header}.{data_key}", + description_placeholders, + translation_required=(iqs_config_flow == "done"), + ) + + async def _check_config_flow_result_translations( manager: FlowManager, flow: FlowHandler, @@ -675,35 +732,16 @@ async def _check_config_flow_result_translations( setattr(flow, "__flow_seen_before", hasattr(flow, "__flow_seen_before")) if result["type"] is FlowResultType.FORM: - iqs_config_flow = _get_integration_quality_scale_rule( - integration, "config-flow" - ) if step_id := result.get("step_id"): - # neither title nor description are required - # - title defaults to integration name - # - description is optional - for header in ("title", "description"): - await _validate_translation( - flow.hass, - translation_errors, - category, - integration, - f"{key_prefix}step.{step_id}.{header}", - result["description_placeholders"], - translation_required=False, - ) - if iqs_config_flow == "done" and (data_schema := result["data_schema"]): - # data and data_description are compulsory - for data_key in data_schema.schema: - for header in ("data", "data_description"): - await _validate_translation( - flow.hass, - translation_errors, - category, - integration, - f"{key_prefix}step.{step_id}.{header}.{data_key}", - result["description_placeholders"], - ) + await _check_step_or_section_translations( + flow.hass, + translation_errors, + category, + integration, + f"{key_prefix}step.{step_id}", + result["description_placeholders"], + 
result["data_schema"], + ) if errors := result.get("errors"): for error in errors.values(): From 637614299ce5e649fc5e01fda1b607ade7c1165b Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 17 Dec 2024 12:41:18 +0100 Subject: [PATCH 336/677] Fix strptime in python_script (#133159) Co-authored-by: Erik Montnemery --- .../components/python_script/__init__.py | 17 +++++++++++++ tests/components/python_script/test_init.py | 24 +++++++++++++++++++ 2 files changed, 41 insertions(+) diff --git a/homeassistant/components/python_script/__init__.py b/homeassistant/components/python_script/__init__.py index 70e9c5b0d29..af773278029 100644 --- a/homeassistant/components/python_script/__init__.py +++ b/homeassistant/components/python_script/__init__.py @@ -1,5 +1,6 @@ """Component to allow running Python scripts.""" +from collections.abc import Mapping, Sequence import datetime import glob import logging @@ -7,6 +8,7 @@ from numbers import Number import operator import os import time +import types from typing import Any from RestrictedPython import ( @@ -167,6 +169,20 @@ IOPERATOR_TO_OPERATOR = { } +def guarded_import( + name: str, + globals: Mapping[str, object] | None = None, + locals: Mapping[str, object] | None = None, + fromlist: Sequence[str] = (), + level: int = 0, +) -> types.ModuleType: + """Guard imports.""" + # Allow import of _strptime needed by datetime.datetime.strptime + if name == "_strptime": + return __import__(name, globals, locals, fromlist, level) + raise ScriptError(f"Not allowed to import {name}") + + def guarded_inplacevar(op: str, target: Any, operand: Any) -> Any: """Implement augmented-assign (+=, -=, etc.) operators for restricted code. 
@@ -232,6 +248,7 @@ def execute(hass, filename, source, data=None, return_response=False): return getattr(obj, name, default) extra_builtins = { + "__import__": guarded_import, "datetime": datetime, "sorted": sorted, "time": TimeWrapper(), diff --git a/tests/components/python_script/test_init.py b/tests/components/python_script/test_init.py index c4dc00c448a..2d151b4b81e 100644 --- a/tests/components/python_script/test_init.py +++ b/tests/components/python_script/test_init.py @@ -688,3 +688,27 @@ async def test_prohibited_augmented_assignment_operations( hass.async_add_executor_job(execute, hass, "aug_assign_prohibited.py", case, {}) await hass.async_block_till_done(wait_background_tasks=True) assert error in caplog.text + + +async def test_import_allow_strptime( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test calling datetime.datetime.strptime works.""" + source = """ +test_date = datetime.datetime.strptime('2024-04-01', '%Y-%m-%d') +logger.info(f'Date {test_date}') + """ + hass.async_add_executor_job(execute, hass, "test.py", source, {}) + await hass.async_block_till_done(wait_background_tasks=True) + assert "Error executing script: Not allowed to import _strptime" not in caplog.text + assert "Date 2024-04-01 00:00:00" in caplog.text + + +async def test_no_other_imports_allowed( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test imports are not allowed.""" + source = "import sys" + hass.async_add_executor_job(execute, hass, "test.py", source, {}) + await hass.async_block_till_done(wait_background_tasks=True) + assert "Error executing script: Not allowed to import sys" in caplog.text From a42c0230c9796d5401e131d7eeeface98c3f80e0 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Sun, 1 Dec 2024 18:26:29 +0100 Subject: [PATCH 337/677] Simplify recorder RecorderRunsManager (#131785) --- .../recorder/table_managers/recorder_runs.py | 73 +++---------------- .../table_managers/test_recorder_runs.py | 32 
++------ 2 files changed, 15 insertions(+), 90 deletions(-) diff --git a/homeassistant/components/recorder/table_managers/recorder_runs.py b/homeassistant/components/recorder/table_managers/recorder_runs.py index b0b9818118b..4ca0aa18b88 100644 --- a/homeassistant/components/recorder/table_managers/recorder_runs.py +++ b/homeassistant/components/recorder/table_managers/recorder_runs.py @@ -2,8 +2,6 @@ from __future__ import annotations -import bisect -from dataclasses import dataclass from datetime import datetime from sqlalchemy.orm.session import Session @@ -11,34 +9,6 @@ from sqlalchemy.orm.session import Session import homeassistant.util.dt as dt_util from ..db_schema import RecorderRuns -from ..models import process_timestamp - - -def _find_recorder_run_for_start_time( - run_history: _RecorderRunsHistory, start: datetime -) -> RecorderRuns | None: - """Find the recorder run for a start time in _RecorderRunsHistory.""" - run_timestamps = run_history.run_timestamps - runs_by_timestamp = run_history.runs_by_timestamp - - # bisect_left tells us were we would insert - # a value in the list of runs after the start timestamp. 
- # - # The run before that (idx-1) is when the run started - # - # If idx is 0, history never ran before the start timestamp - # - if idx := bisect.bisect_left(run_timestamps, start.timestamp()): - return runs_by_timestamp[run_timestamps[idx - 1]] - return None - - -@dataclass(frozen=True) -class _RecorderRunsHistory: - """Bisectable history of RecorderRuns.""" - - run_timestamps: list[int] - runs_by_timestamp: dict[int, RecorderRuns] class RecorderRunsManager: @@ -48,7 +18,7 @@ class RecorderRunsManager: """Track recorder run history.""" self._recording_start = dt_util.utcnow() self._current_run_info: RecorderRuns | None = None - self._run_history = _RecorderRunsHistory([], {}) + self._first_run: RecorderRuns | None = None @property def recording_start(self) -> datetime: @@ -58,9 +28,7 @@ class RecorderRunsManager: @property def first(self) -> RecorderRuns: """Get the first run.""" - if runs_by_timestamp := self._run_history.runs_by_timestamp: - return next(iter(runs_by_timestamp.values())) - return self.current + return self._first_run or self.current @property def current(self) -> RecorderRuns: @@ -78,15 +46,6 @@ class RecorderRunsManager: """Return if a run is active.""" return self._current_run_info is not None - def get(self, start: datetime) -> RecorderRuns | None: - """Return the recorder run that started before or at start. - - If the first run started after the start, return None - """ - if start >= self.recording_start: - return self.current - return _find_recorder_run_for_start_time(self._run_history, start) - def start(self, session: Session) -> None: """Start a new run. @@ -122,31 +81,17 @@ class RecorderRunsManager: Must run in the recorder thread. 
""" - run_timestamps: list[int] = [] - runs_by_timestamp: dict[int, RecorderRuns] = {} - - for run in session.query(RecorderRuns).order_by(RecorderRuns.start.asc()).all(): + if ( + run := session.query(RecorderRuns) + .order_by(RecorderRuns.start.asc()) + .first() + ): session.expunge(run) - if run_dt := process_timestamp(run.start): - # Not sure if this is correct or runs_by_timestamp annotation should be changed - timestamp = int(run_dt.timestamp()) - run_timestamps.append(timestamp) - runs_by_timestamp[timestamp] = run - - # - # self._run_history is accessed in get() - # which is allowed to be called from any thread - # - # We use a dataclass to ensure that when we update - # run_timestamps and runs_by_timestamp - # are never out of sync with each other. - # - self._run_history = _RecorderRunsHistory(run_timestamps, runs_by_timestamp) + self._first_run = run def clear(self) -> None: """Clear the current run after ending it. Must run in the recorder thread. """ - if self._current_run_info: - self._current_run_info = None + self._current_run_info = None diff --git a/tests/components/recorder/table_managers/test_recorder_runs.py b/tests/components/recorder/table_managers/test_recorder_runs.py index 41f3a8fef4d..e79def01bad 100644 --- a/tests/components/recorder/table_managers/test_recorder_runs.py +++ b/tests/components/recorder/table_managers/test_recorder_runs.py @@ -21,6 +21,11 @@ async def test_run_history(recorder_mock: Recorder, hass: HomeAssistant) -> None two_days_ago = now - timedelta(days=2) one_day_ago = now - timedelta(days=1) + # Test that the first run falls back to the current run + assert process_timestamp( + instance.recorder_runs_manager.first.start + ) == process_timestamp(instance.recorder_runs_manager.current.start) + with instance.get_session() as session: session.add(RecorderRuns(start=three_days_ago, created=three_days_ago)) session.add(RecorderRuns(start=two_days_ago, created=two_days_ago)) @@ -29,32 +34,7 @@ async def 
test_run_history(recorder_mock: Recorder, hass: HomeAssistant) -> None instance.recorder_runs_manager.load_from_db(session) assert ( - process_timestamp( - instance.recorder_runs_manager.get( - three_days_ago + timedelta(microseconds=1) - ).start - ) - == three_days_ago - ) - assert ( - process_timestamp( - instance.recorder_runs_manager.get( - two_days_ago + timedelta(microseconds=1) - ).start - ) - == two_days_ago - ) - assert ( - process_timestamp( - instance.recorder_runs_manager.get( - one_day_ago + timedelta(microseconds=1) - ).start - ) - == one_day_ago - ) - assert ( - process_timestamp(instance.recorder_runs_manager.get(now).start) - == instance.recorder_runs_manager.recording_start + process_timestamp(instance.recorder_runs_manager.first.start) == three_days_ago ) From cce7b9ac3448bf97d05dac898855124a29060940 Mon Sep 17 00:00:00 2001 From: rappenze Date: Sun, 15 Dec 2024 11:02:26 +0100 Subject: [PATCH 338/677] Fix fibaro climate hvac mode (#132508) --- homeassistant/components/fibaro/climate.py | 6 +- tests/components/fibaro/conftest.py | 56 +++++++++ tests/components/fibaro/test_climate.py | 134 +++++++++++++++++++++ 3 files changed, 193 insertions(+), 3 deletions(-) create mode 100644 tests/components/fibaro/test_climate.py diff --git a/homeassistant/components/fibaro/climate.py b/homeassistant/components/fibaro/climate.py index 0bfc2223317..6948dc9122f 100644 --- a/homeassistant/components/fibaro/climate.py +++ b/homeassistant/components/fibaro/climate.py @@ -274,7 +274,9 @@ class FibaroThermostat(FibaroEntity, ClimateEntity): if isinstance(fibaro_operation_mode, str): with suppress(ValueError): return HVACMode(fibaro_operation_mode.lower()) - elif fibaro_operation_mode in OPMODES_HVAC: + # when the mode cannot be instantiated a preset_mode is selected + return HVACMode.AUTO + if fibaro_operation_mode in OPMODES_HVAC: return OPMODES_HVAC[fibaro_operation_mode] return None @@ -282,8 +284,6 @@ class FibaroThermostat(FibaroEntity, ClimateEntity): """Set 
new target operation mode.""" if not self._op_mode_device: return - if self.preset_mode: - return if "setOperatingMode" in self._op_mode_device.fibaro_device.actions: self._op_mode_device.action("setOperatingMode", HA_OPMODES_HVAC[hvac_mode]) diff --git a/tests/components/fibaro/conftest.py b/tests/components/fibaro/conftest.py index 1976a8f310b..583c44a41e6 100644 --- a/tests/components/fibaro/conftest.py +++ b/tests/components/fibaro/conftest.py @@ -129,6 +129,62 @@ def mock_light() -> Mock: return light +@pytest.fixture +def mock_thermostat() -> Mock: + """Fixture for a thermostat.""" + climate = Mock() + climate.fibaro_id = 4 + climate.parent_fibaro_id = 0 + climate.name = "Test climate" + climate.room_id = 1 + climate.dead = False + climate.visible = True + climate.enabled = True + climate.type = "com.fibaro.thermostatDanfoss" + climate.base_type = "com.fibaro.device" + climate.properties = {"manufacturer": ""} + climate.actions = {"setThermostatMode": 1} + climate.supported_features = {} + climate.has_supported_thermostat_modes = True + climate.supported_thermostat_modes = ["Off", "Heat", "CustomerSpecific"] + climate.has_operating_mode = False + climate.has_thermostat_mode = True + climate.thermostat_mode = "CustomerSpecific" + value_mock = Mock() + value_mock.has_value = True + value_mock.int_value.return_value = 20 + climate.value = value_mock + return climate + + +@pytest.fixture +def mock_thermostat_with_operating_mode() -> Mock: + """Fixture for a thermostat.""" + climate = Mock() + climate.fibaro_id = 4 + climate.parent_fibaro_id = 0 + climate.name = "Test climate" + climate.room_id = 1 + climate.dead = False + climate.visible = True + climate.enabled = True + climate.type = "com.fibaro.thermostatDanfoss" + climate.base_type = "com.fibaro.device" + climate.properties = {"manufacturer": ""} + climate.actions = {"setOperationMode": 1} + climate.supported_features = {} + climate.has_supported_operating_modes = True + climate.supported_operating_modes = 
[0, 1, 15] + climate.has_operating_mode = True + climate.operating_mode = 15 + climate.has_thermostat_mode = False + value_mock = Mock() + value_mock.has_value = True + value_mock.int_value.return_value = 20 + climate.value = value_mock + return climate + + @pytest.fixture def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Return the default mocked config entry.""" diff --git a/tests/components/fibaro/test_climate.py b/tests/components/fibaro/test_climate.py new file mode 100644 index 00000000000..31022e19a08 --- /dev/null +++ b/tests/components/fibaro/test_climate.py @@ -0,0 +1,134 @@ +"""Test the Fibaro climate platform.""" + +from unittest.mock import Mock, patch + +from homeassistant.components.climate import ClimateEntityFeature, HVACMode +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import init_integration + +from tests.common import MockConfigEntry + + +async def test_climate_setup( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_thermostat: Mock, + mock_room: Mock, +) -> None: + """Test that the climate creates an entity.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + entry = entity_registry.async_get("climate.room_1_test_climate_4") + assert entry + assert entry.unique_id == "hc2_111111.4" + assert entry.original_name == "Room 1 Test climate" + assert entry.supported_features == ( + ClimateEntityFeature.TURN_ON + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.PRESET_MODE + ) + + +async def test_hvac_mode_preset( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: 
MockConfigEntry, + mock_thermostat: Mock, + mock_room: Mock, +) -> None: + """Test that the climate state is auto when a preset is selected.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + state = hass.states.get("climate.room_1_test_climate_4") + assert state.state == HVACMode.AUTO + assert state.attributes["preset_mode"] == "CustomerSpecific" + + +async def test_hvac_mode_heat( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_thermostat: Mock, + mock_room: Mock, +) -> None: + """Test that the preset mode is None if a hvac mode is active.""" + + # Arrange + mock_thermostat.thermostat_mode = "Heat" + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + state = hass.states.get("climate.room_1_test_climate_4") + assert state.state == HVACMode.HEAT + assert state.attributes["preset_mode"] is None + + +async def test_set_hvac_mode( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_thermostat: Mock, + mock_room: Mock, +) -> None: + """Test that set_hvac_mode() works.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + await hass.services.async_call( + "climate", + "set_hvac_mode", + {"entity_id": "climate.room_1_test_climate_4", "hvac_mode": HVACMode.HEAT}, + blocking=True, + ) + + # Assert 
+ mock_thermostat.execute_action.assert_called_once() + + +async def test_hvac_mode_with_operation_mode_support( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_thermostat_with_operating_mode: Mock, + mock_room: Mock, +) -> None: + """Test that operating mode works.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_thermostat_with_operating_mode] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + state = hass.states.get("climate.room_1_test_climate_4") + assert state.state == HVACMode.AUTO From 8286ec9e603334840e2dbd8ccf323a820aee5364 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 13 Dec 2024 13:30:22 +0100 Subject: [PATCH 339/677] Bump yt-dlp to 2024.12.13 (#133129) --- homeassistant/components/media_extractor/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index 195dc678bc2..21c07607573 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -8,6 +8,6 @@ "iot_class": "calculated", "loggers": ["yt_dlp"], "quality_scale": "internal", - "requirements": ["yt-dlp[default]==2024.12.06"], + "requirements": ["yt-dlp[default]==2024.12.13"], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index 38239d22af2..984e1b1374c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3066,7 +3066,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.12.06 +yt-dlp[default]==2024.12.13 # homeassistant.components.zamg zamg==0.3.6 diff --git a/requirements_test_all.txt 
b/requirements_test_all.txt index 1c76684a4a1..72399d331bb 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2452,7 +2452,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.12.06 +yt-dlp[default]==2024.12.13 # homeassistant.components.zamg zamg==0.3.6 From cdea9b5d3a387a3d39e90aa3736aca937685e52e Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 17 Dec 2024 12:41:18 +0100 Subject: [PATCH 340/677] Fix strptime in python_script (#133159) Co-authored-by: Erik Montnemery --- .../components/python_script/__init__.py | 17 +++++++++++++ tests/components/python_script/test_init.py | 24 +++++++++++++++++++ 2 files changed, 41 insertions(+) diff --git a/homeassistant/components/python_script/__init__.py b/homeassistant/components/python_script/__init__.py index 70e9c5b0d29..af773278029 100644 --- a/homeassistant/components/python_script/__init__.py +++ b/homeassistant/components/python_script/__init__.py @@ -1,5 +1,6 @@ """Component to allow running Python scripts.""" +from collections.abc import Mapping, Sequence import datetime import glob import logging @@ -7,6 +8,7 @@ from numbers import Number import operator import os import time +import types from typing import Any from RestrictedPython import ( @@ -167,6 +169,20 @@ IOPERATOR_TO_OPERATOR = { } +def guarded_import( + name: str, + globals: Mapping[str, object] | None = None, + locals: Mapping[str, object] | None = None, + fromlist: Sequence[str] = (), + level: int = 0, +) -> types.ModuleType: + """Guard imports.""" + # Allow import of _strptime needed by datetime.datetime.strptime + if name == "_strptime": + return __import__(name, globals, locals, fromlist, level) + raise ScriptError(f"Not allowed to import {name}") + + def guarded_inplacevar(op: str, target: Any, operand: Any) -> Any: """Implement augmented-assign (+=, -=, etc.) operators for restricted code. 
@@ -232,6 +248,7 @@ def execute(hass, filename, source, data=None, return_response=False): return getattr(obj, name, default) extra_builtins = { + "__import__": guarded_import, "datetime": datetime, "sorted": sorted, "time": TimeWrapper(), diff --git a/tests/components/python_script/test_init.py b/tests/components/python_script/test_init.py index c4dc00c448a..2d151b4b81e 100644 --- a/tests/components/python_script/test_init.py +++ b/tests/components/python_script/test_init.py @@ -688,3 +688,27 @@ async def test_prohibited_augmented_assignment_operations( hass.async_add_executor_job(execute, hass, "aug_assign_prohibited.py", case, {}) await hass.async_block_till_done(wait_background_tasks=True) assert error in caplog.text + + +async def test_import_allow_strptime( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test calling datetime.datetime.strptime works.""" + source = """ +test_date = datetime.datetime.strptime('2024-04-01', '%Y-%m-%d') +logger.info(f'Date {test_date}') + """ + hass.async_add_executor_job(execute, hass, "test.py", source, {}) + await hass.async_block_till_done(wait_background_tasks=True) + assert "Error executing script: Not allowed to import _strptime" not in caplog.text + assert "Date 2024-04-01 00:00:00" in caplog.text + + +async def test_no_other_imports_allowed( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test imports are not allowed.""" + source = "import sys" + hass.async_add_executor_job(execute, hass, "test.py", source, {}) + await hass.async_block_till_done(wait_background_tasks=True) + assert "Error executing script: Not allowed to import sys" in caplog.text From 223817a7fbbaa597e837d648b89b3d9326479b9a Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 13 Dec 2024 16:37:26 -0500 Subject: [PATCH 341/677] Bump yalexs-ble to 2.5.4 (#133172) --- homeassistant/components/august/manifest.json | 2 +- homeassistant/components/yale/manifest.json | 2 +- homeassistant/components/yalexs_ble/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index 99dbbc0ed9c..ed2c8007ee8 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.2"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.4"] } diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json index 474ed36e90c..2ed1f4b5c43 100644 --- a/homeassistant/components/yale/manifest.json +++ b/homeassistant/components/yale/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale", "iot_class": "cloud_push", "loggers": ["socketio", "engineio", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.2"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.4"] } diff --git a/homeassistant/components/yalexs_ble/manifest.json b/homeassistant/components/yalexs_ble/manifest.json index 95d28cd5372..1472f9035ea 100644 --- a/homeassistant/components/yalexs_ble/manifest.json +++ b/homeassistant/components/yalexs_ble/manifest.json @@ -12,5 +12,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/yalexs_ble", "iot_class": "local_push", - "requirements": ["yalexs-ble==2.5.2"] + "requirements": ["yalexs-ble==2.5.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index 
984e1b1374c..5536c7723f5 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3044,7 +3044,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.2 +yalexs-ble==2.5.4 # homeassistant.components.august # homeassistant.components.yale diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 72399d331bb..e3df6637649 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2433,7 +2433,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.2 +yalexs-ble==2.5.4 # homeassistant.components.august # homeassistant.components.yale From 9b02db008e389e39eff0821609ac1de940fe2954 Mon Sep 17 00:00:00 2001 From: Conor Eager Date: Mon, 16 Dec 2024 05:43:21 +1300 Subject: [PATCH 342/677] Bump starlink-grpc-core to 1.2.1 to fix missing ping (#133183) --- homeassistant/components/starlink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/starlink/manifest.json b/homeassistant/components/starlink/manifest.json index 070cbf1b44c..15bad3ebc2e 100644 --- a/homeassistant/components/starlink/manifest.json +++ b/homeassistant/components/starlink/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/starlink", "iot_class": "local_polling", - "requirements": ["starlink-grpc-core==1.2.0"] + "requirements": ["starlink-grpc-core==1.2.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 5536c7723f5..8b1846afc4f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2734,7 +2734,7 @@ starline==0.1.5 starlingbank==3.2 # homeassistant.components.starlink -starlink-grpc-core==1.2.0 +starlink-grpc-core==1.2.2 # homeassistant.components.statsd statsd==3.2.1 diff 
--git a/requirements_test_all.txt b/requirements_test_all.txt index e3df6637649..07d6a80270f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2183,7 +2183,7 @@ srpenergy==1.3.6 starline==0.1.5 # homeassistant.components.starlink -starlink-grpc-core==1.2.0 +starlink-grpc-core==1.2.2 # homeassistant.components.statsd statsd==3.2.1 From 9b0a4897539c2b1e8551953372c1730531a6414c Mon Sep 17 00:00:00 2001 From: Avi Miller Date: Sun, 15 Dec 2024 20:24:41 +1100 Subject: [PATCH 343/677] Bump aiolifx to 1.1.2 and add new HomeKit product prefixes (#133191) Signed-off-by: Avi Miller --- homeassistant/components/lifx/manifest.json | 5 ++++- homeassistant/generated/zeroconf.py | 12 ++++++++++++ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 18 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lifx/manifest.json b/homeassistant/components/lifx/manifest.json index c7d8a27a1c7..2e16eb2082b 100644 --- a/homeassistant/components/lifx/manifest.json +++ b/homeassistant/components/lifx/manifest.json @@ -23,6 +23,7 @@ "LIFX Ceiling", "LIFX Clean", "LIFX Color", + "LIFX Colour", "LIFX DLCOL", "LIFX Dlight", "LIFX DLWW", @@ -35,12 +36,14 @@ "LIFX Neon", "LIFX Nightvision", "LIFX PAR38", + "LIFX Permanent Outdoor", "LIFX Pls", "LIFX Plus", "LIFX Round", "LIFX Square", "LIFX String", "LIFX Tile", + "LIFX Tube", "LIFX White", "LIFX Z" ] @@ -48,7 +51,7 @@ "iot_class": "local_polling", "loggers": ["aiolifx", "aiolifx_effects", "bitstring"], "requirements": [ - "aiolifx==1.1.1", + "aiolifx==1.1.2", "aiolifx-effects==0.3.2", "aiolifx-themes==0.5.5" ] diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index 5f7161a8245..749c1acfb15 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -92,6 +92,10 @@ HOMEKIT = { "always_discover": True, "domain": "lifx", }, + "LIFX Colour": { + "always_discover": True, + "domain": "lifx", + }, "LIFX DLCOL": { 
"always_discover": True, "domain": "lifx", @@ -140,6 +144,10 @@ HOMEKIT = { "always_discover": True, "domain": "lifx", }, + "LIFX Permanent Outdoor": { + "always_discover": True, + "domain": "lifx", + }, "LIFX Pls": { "always_discover": True, "domain": "lifx", @@ -164,6 +172,10 @@ HOMEKIT = { "always_discover": True, "domain": "lifx", }, + "LIFX Tube": { + "always_discover": True, + "domain": "lifx", + }, "LIFX White": { "always_discover": True, "domain": "lifx", diff --git a/requirements_all.txt b/requirements_all.txt index 8b1846afc4f..6929deaf14e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -286,7 +286,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.5.5 # homeassistant.components.lifx -aiolifx==1.1.1 +aiolifx==1.1.2 # homeassistant.components.livisi aiolivisi==0.0.19 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 07d6a80270f..83070eb8030 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -268,7 +268,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.5.5 # homeassistant.components.lifx -aiolifx==1.1.1 +aiolifx==1.1.2 # homeassistant.components.livisi aiolivisi==0.0.19 From e61142c2c2ce88bcfca3b141a77635e5d681c653 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 17 Dec 2024 12:53:27 +0100 Subject: [PATCH 344/677] Check if requirement is typed in strict_typing IQS validation (#133415) * Check if requirement is typed in strict_typing IQS validation * Apply suggestions from code review * Apply suggestions from code review * Return a list * Adjust * Improve --- .../components/fritz/quality_scale.yaml | 5 +++- .../components/imap/quality_scale.yaml | 5 +++- .../components/mastodon/quality_scale.yaml | 5 +++- .../components/mqtt/quality_scale.yaml | 5 +++- .../components/stookwijzer/quality_scale.yaml | 5 +++- .../quality_scale_validation/strict_typing.py | 29 +++++++++++++++++++ 6 files changed, 49 insertions(+), 5 deletions(-) diff --git 
a/homeassistant/components/fritz/quality_scale.yaml b/homeassistant/components/fritz/quality_scale.yaml index b832492cf9d..06c572f93a6 100644 --- a/homeassistant/components/fritz/quality_scale.yaml +++ b/homeassistant/components/fritz/quality_scale.yaml @@ -95,4 +95,7 @@ rules: comment: | the fritzconnection lib is not async and relies on requests changing this might need a bit more efforts to be spent - strict-typing: done + strict-typing: + status: todo + comment: | + Requirements 'fritzconnection==1.14.0' and 'xmltodict==0.13.0' appear untyped diff --git a/homeassistant/components/imap/quality_scale.yaml b/homeassistant/components/imap/quality_scale.yaml index 180aef93f91..1c75b527882 100644 --- a/homeassistant/components/imap/quality_scale.yaml +++ b/homeassistant/components/imap/quality_scale.yaml @@ -94,4 +94,7 @@ rules: status: exempt comment: | This integration does not use web sessions. - strict-typing: done + strict-typing: + status: todo + comment: | + Requirement 'aioimaplib==1.1.0' appears untyped diff --git a/homeassistant/components/mastodon/quality_scale.yaml b/homeassistant/components/mastodon/quality_scale.yaml index 315ef808701..86702095e95 100644 --- a/homeassistant/components/mastodon/quality_scale.yaml +++ b/homeassistant/components/mastodon/quality_scale.yaml @@ -93,4 +93,7 @@ rules: # Platinum async-dependency: todo inject-websession: todo - strict-typing: done + strict-typing: + status: todo + comment: | + Requirement 'Mastodon.py==1.8.1' appears untyped diff --git a/homeassistant/components/mqtt/quality_scale.yaml b/homeassistant/components/mqtt/quality_scale.yaml index f31d3e25d15..26ce8cb08dd 100644 --- a/homeassistant/components/mqtt/quality_scale.yaml +++ b/homeassistant/components/mqtt/quality_scale.yaml @@ -125,4 +125,7 @@ rules: status: exempt comment: | This integration does not use web sessions. 
- strict-typing: done + strict-typing: + status: todo + comment: | + Requirement 'paho-mqtt==1.6.1' appears untyped diff --git a/homeassistant/components/stookwijzer/quality_scale.yaml b/homeassistant/components/stookwijzer/quality_scale.yaml index 67fadc00b64..20e64efaa92 100644 --- a/homeassistant/components/stookwijzer/quality_scale.yaml +++ b/homeassistant/components/stookwijzer/quality_scale.yaml @@ -86,4 +86,7 @@ rules: # Platinum async-dependency: done inject-websession: done - strict-typing: done + strict-typing: + status: todo + comment: | + Requirement 'stookwijzer==1.5.1' appears untyped diff --git a/script/hassfest/quality_scale_validation/strict_typing.py b/script/hassfest/quality_scale_validation/strict_typing.py index a27ab752cf0..c1373032ff8 100644 --- a/script/hassfest/quality_scale_validation/strict_typing.py +++ b/script/hassfest/quality_scale_validation/strict_typing.py @@ -4,6 +4,7 @@ https://developers.home-assistant.io/docs/core/integration-quality-scale/rules/s """ from functools import lru_cache +from importlib import metadata from pathlib import Path import re @@ -24,6 +25,29 @@ def _strict_typing_components(strict_typing_file: Path) -> set[str]: ) +def _check_requirements_are_typed(integration: Integration) -> list[str]: + """Check if all requirements are typed.""" + invalid_requirements = [] + for requirement in integration.requirements: + requirement_name, requirement_version = requirement.split("==") + # Remove any extras + requirement_name = requirement_name.split("[")[0] + try: + distribution = metadata.distribution(requirement_name) + except metadata.PackageNotFoundError: + # Package not installed locally + continue + if distribution.version != requirement_version: + # Version out of date locally + continue + + if not any(file for file in distribution.files if file.name == "py.typed"): + # no py.typed file + invalid_requirements.append(requirement) + + return invalid_requirements + + def validate( config: Config, integration: 
Integration, *, rules_done: set[str] ) -> list[str] | None: @@ -35,4 +59,9 @@ def validate( "Integration does not have strict typing enabled " "(is missing from .strict-typing)" ] + if untyped_requirements := _check_requirements_are_typed(integration): + return [ + f"Requirements {untyped_requirements} do not conform PEP 561 (https://peps.python.org/pep-0561/)", + "They should be typed and have a 'py.typed' file", + ] return None From ca47253d81e3774a95166323b2001e50f3d0be8d Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Sat, 14 Dec 2024 14:21:19 +0100 Subject: [PATCH 345/677] Revert "Simplify recorder RecorderRunsManager" (#133201) Revert "Simplify recorder RecorderRunsManager (#131785)" This reverts commit cf0ee635077114961f6e508be56ce7620c718c18. --- .../recorder/table_managers/recorder_runs.py | 73 ++++++++++++++++--- .../table_managers/test_recorder_runs.py | 32 ++++++-- 2 files changed, 90 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/recorder/table_managers/recorder_runs.py b/homeassistant/components/recorder/table_managers/recorder_runs.py index 4ca0aa18b88..b0b9818118b 100644 --- a/homeassistant/components/recorder/table_managers/recorder_runs.py +++ b/homeassistant/components/recorder/table_managers/recorder_runs.py @@ -2,6 +2,8 @@ from __future__ import annotations +import bisect +from dataclasses import dataclass from datetime import datetime from sqlalchemy.orm.session import Session @@ -9,6 +11,34 @@ from sqlalchemy.orm.session import Session import homeassistant.util.dt as dt_util from ..db_schema import RecorderRuns +from ..models import process_timestamp + + +def _find_recorder_run_for_start_time( + run_history: _RecorderRunsHistory, start: datetime +) -> RecorderRuns | None: + """Find the recorder run for a start time in _RecorderRunsHistory.""" + run_timestamps = run_history.run_timestamps + runs_by_timestamp = run_history.runs_by_timestamp + + # bisect_left tells us were we would insert + # a value in the list of 
runs after the start timestamp. + # + # The run before that (idx-1) is when the run started + # + # If idx is 0, history never ran before the start timestamp + # + if idx := bisect.bisect_left(run_timestamps, start.timestamp()): + return runs_by_timestamp[run_timestamps[idx - 1]] + return None + + +@dataclass(frozen=True) +class _RecorderRunsHistory: + """Bisectable history of RecorderRuns.""" + + run_timestamps: list[int] + runs_by_timestamp: dict[int, RecorderRuns] class RecorderRunsManager: @@ -18,7 +48,7 @@ class RecorderRunsManager: """Track recorder run history.""" self._recording_start = dt_util.utcnow() self._current_run_info: RecorderRuns | None = None - self._first_run: RecorderRuns | None = None + self._run_history = _RecorderRunsHistory([], {}) @property def recording_start(self) -> datetime: @@ -28,7 +58,9 @@ class RecorderRunsManager: @property def first(self) -> RecorderRuns: """Get the first run.""" - return self._first_run or self.current + if runs_by_timestamp := self._run_history.runs_by_timestamp: + return next(iter(runs_by_timestamp.values())) + return self.current @property def current(self) -> RecorderRuns: @@ -46,6 +78,15 @@ class RecorderRunsManager: """Return if a run is active.""" return self._current_run_info is not None + def get(self, start: datetime) -> RecorderRuns | None: + """Return the recorder run that started before or at start. + + If the first run started after the start, return None + """ + if start >= self.recording_start: + return self.current + return _find_recorder_run_for_start_time(self._run_history, start) + def start(self, session: Session) -> None: """Start a new run. @@ -81,17 +122,31 @@ class RecorderRunsManager: Must run in the recorder thread. 
""" - if ( - run := session.query(RecorderRuns) - .order_by(RecorderRuns.start.asc()) - .first() - ): + run_timestamps: list[int] = [] + runs_by_timestamp: dict[int, RecorderRuns] = {} + + for run in session.query(RecorderRuns).order_by(RecorderRuns.start.asc()).all(): session.expunge(run) - self._first_run = run + if run_dt := process_timestamp(run.start): + # Not sure if this is correct or runs_by_timestamp annotation should be changed + timestamp = int(run_dt.timestamp()) + run_timestamps.append(timestamp) + runs_by_timestamp[timestamp] = run + + # + # self._run_history is accessed in get() + # which is allowed to be called from any thread + # + # We use a dataclass to ensure that when we update + # run_timestamps and runs_by_timestamp + # are never out of sync with each other. + # + self._run_history = _RecorderRunsHistory(run_timestamps, runs_by_timestamp) def clear(self) -> None: """Clear the current run after ending it. Must run in the recorder thread. """ - self._current_run_info = None + if self._current_run_info: + self._current_run_info = None diff --git a/tests/components/recorder/table_managers/test_recorder_runs.py b/tests/components/recorder/table_managers/test_recorder_runs.py index e79def01bad..41f3a8fef4d 100644 --- a/tests/components/recorder/table_managers/test_recorder_runs.py +++ b/tests/components/recorder/table_managers/test_recorder_runs.py @@ -21,11 +21,6 @@ async def test_run_history(recorder_mock: Recorder, hass: HomeAssistant) -> None two_days_ago = now - timedelta(days=2) one_day_ago = now - timedelta(days=1) - # Test that the first run falls back to the current run - assert process_timestamp( - instance.recorder_runs_manager.first.start - ) == process_timestamp(instance.recorder_runs_manager.current.start) - with instance.get_session() as session: session.add(RecorderRuns(start=three_days_ago, created=three_days_ago)) session.add(RecorderRuns(start=two_days_ago, created=two_days_ago)) @@ -34,7 +29,32 @@ async def 
test_run_history(recorder_mock: Recorder, hass: HomeAssistant) -> None instance.recorder_runs_manager.load_from_db(session) assert ( - process_timestamp(instance.recorder_runs_manager.first.start) == three_days_ago + process_timestamp( + instance.recorder_runs_manager.get( + three_days_ago + timedelta(microseconds=1) + ).start + ) + == three_days_ago + ) + assert ( + process_timestamp( + instance.recorder_runs_manager.get( + two_days_ago + timedelta(microseconds=1) + ).start + ) + == two_days_ago + ) + assert ( + process_timestamp( + instance.recorder_runs_manager.get( + one_day_ago + timedelta(microseconds=1) + ).start + ) + == one_day_ago + ) + assert ( + process_timestamp(instance.recorder_runs_manager.get(now).start) + == instance.recorder_runs_manager.recording_start ) From 3b0ab421b0fa1b94326eea67c280a87ec7b8250f Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Sun, 15 Dec 2024 12:28:32 +0100 Subject: [PATCH 346/677] Revert "Improve recorder history queries (#131702)" (#133203) --- homeassistant/components/history/__init__.py | 7 ++-- homeassistant/components/history/helpers.py | 13 ++++---- .../components/history/websocket_api.py | 7 ++-- homeassistant/components/recorder/core.py | 1 - .../components/recorder/history/legacy.py | 18 ++++++----- .../components/recorder/history/modern.py | 31 +++++++++--------- homeassistant/components/recorder/purge.py | 3 -- homeassistant/components/recorder/queries.py | 9 ------ .../recorder/table_managers/states.py | 32 ------------------- homeassistant/components/recorder/tasks.py | 2 ++ tests/components/recorder/test_purge.py | 17 ---------- 11 files changed, 38 insertions(+), 102 deletions(-) diff --git a/homeassistant/components/history/__init__.py b/homeassistant/components/history/__init__.py index 7241e1fac9a..365be06fd2d 100644 --- a/homeassistant/components/history/__init__.py +++ b/homeassistant/components/history/__init__.py @@ -22,7 +22,7 @@ import homeassistant.util.dt as dt_util from . 
import websocket_api from .const import DOMAIN -from .helpers import entities_may_have_state_changes_after, has_states_before +from .helpers import entities_may_have_state_changes_after, has_recorder_run_after CONF_ORDER = "use_include_order" @@ -107,10 +107,7 @@ class HistoryPeriodView(HomeAssistantView): no_attributes = "no_attributes" in request.query if ( - # has_states_before will return True if there are states older than - # end_time. If it's false, we know there are no states in the - # database up until end_time. - (end_time and not has_states_before(hass, end_time)) + (end_time and not has_recorder_run_after(hass, end_time)) or not include_start_time_state and entity_ids and not entities_may_have_state_changes_after( diff --git a/homeassistant/components/history/helpers.py b/homeassistant/components/history/helpers.py index 2010b7373ff..bd477e7e4ed 100644 --- a/homeassistant/components/history/helpers.py +++ b/homeassistant/components/history/helpers.py @@ -6,6 +6,7 @@ from collections.abc import Iterable from datetime import datetime as dt from homeassistant.components.recorder import get_instance +from homeassistant.components.recorder.models import process_timestamp from homeassistant.core import HomeAssistant @@ -25,10 +26,8 @@ def entities_may_have_state_changes_after( return False -def has_states_before(hass: HomeAssistant, run_time: dt) -> bool: - """Check if the recorder has states as old or older than run_time. - - Returns True if there may be such states. 
- """ - oldest_ts = get_instance(hass).states_manager.oldest_ts - return oldest_ts is not None and run_time.timestamp() >= oldest_ts +def has_recorder_run_after(hass: HomeAssistant, run_time: dt) -> bool: + """Check if the recorder has any runs after a specific time.""" + return run_time >= process_timestamp( + get_instance(hass).recorder_runs_manager.first.start + ) diff --git a/homeassistant/components/history/websocket_api.py b/homeassistant/components/history/websocket_api.py index 35f8ed5f1ac..c85d975c3c9 100644 --- a/homeassistant/components/history/websocket_api.py +++ b/homeassistant/components/history/websocket_api.py @@ -39,7 +39,7 @@ from homeassistant.util.async_ import create_eager_task import homeassistant.util.dt as dt_util from .const import EVENT_COALESCE_TIME, MAX_PENDING_HISTORY_STATES -from .helpers import entities_may_have_state_changes_after, has_states_before +from .helpers import entities_may_have_state_changes_after, has_recorder_run_after _LOGGER = logging.getLogger(__name__) @@ -142,10 +142,7 @@ async def ws_get_history_during_period( no_attributes = msg["no_attributes"] if ( - # has_states_before will return True if there are states older than - # end_time. If it's false, we know there are no states in the - # database up until end_time. 
- (end_time and not has_states_before(hass, end_time)) + (end_time and not has_recorder_run_after(hass, end_time)) or not include_start_time_state and entity_ids and not entities_may_have_state_changes_after( diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index 0c61f8a955e..0db677ac2af 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -1431,7 +1431,6 @@ class Recorder(threading.Thread): with session_scope(session=self.get_session()) as session: end_incomplete_runs(session, self.recorder_runs_manager.recording_start) self.recorder_runs_manager.start(session) - self.states_manager.load_from_db(session) self._open_event_session() diff --git a/homeassistant/components/recorder/history/legacy.py b/homeassistant/components/recorder/history/legacy.py index 3a0fe79455b..b59fc43c3d0 100644 --- a/homeassistant/components/recorder/history/legacy.py +++ b/homeassistant/components/recorder/history/legacy.py @@ -22,9 +22,9 @@ from homeassistant.core import HomeAssistant, State, split_entity_id from homeassistant.helpers.recorder import get_instance import homeassistant.util.dt as dt_util -from ..db_schema import StateAttributes, States +from ..db_schema import RecorderRuns, StateAttributes, States from ..filters import Filters -from ..models import process_timestamp_to_utc_isoformat +from ..models import process_timestamp, process_timestamp_to_utc_isoformat from ..models.legacy import LegacyLazyState, legacy_row_to_compressed_state from ..util import execute_stmt_lambda_element, session_scope from .const import ( @@ -436,7 +436,7 @@ def get_last_state_changes( def _get_states_for_entities_stmt( - run_start_ts: float, + run_start: datetime, utc_point_in_time: datetime, entity_ids: list[str], no_attributes: bool, @@ -447,6 +447,7 @@ def _get_states_for_entities_stmt( ) # We got an include-list of entities, accelerate the query by filtering already # in the inner query. 
+ run_start_ts = process_timestamp(run_start).timestamp() utc_point_in_time_ts = dt_util.utc_to_timestamp(utc_point_in_time) stmt += lambda q: q.join( ( @@ -482,7 +483,7 @@ def _get_rows_with_session( session: Session, utc_point_in_time: datetime, entity_ids: list[str], - *, + run: RecorderRuns | None = None, no_attributes: bool = False, ) -> Iterable[Row]: """Return the states at a specific point in time.""" @@ -494,16 +495,17 @@ def _get_rows_with_session( ), ) - oldest_ts = get_instance(hass).states_manager.oldest_ts + if run is None: + run = get_instance(hass).recorder_runs_manager.get(utc_point_in_time) - if oldest_ts is None or oldest_ts > utc_point_in_time.timestamp(): - # We don't have any states for the requested time + if run is None or process_timestamp(run.start) > utc_point_in_time: + # History did not run before utc_point_in_time return [] # We have more than one entity to look at so we need to do a query on states # since the last recorder run started. stmt = _get_states_for_entities_stmt( - oldest_ts, utc_point_in_time, entity_ids, no_attributes + run.start, utc_point_in_time, entity_ids, no_attributes ) return execute_stmt_lambda_element(session, stmt) diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index 902f1b5dc24..b44bec0d0ee 100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -34,6 +34,7 @@ from ..models import ( LazyState, datetime_to_timestamp_or_none, extract_metadata_ids, + process_timestamp, row_to_compressed_state, ) from ..util import execute_stmt_lambda_element, session_scope @@ -245,9 +246,9 @@ def get_significant_states_with_session( if metadata_id is not None and split_entity_id(entity_id)[0] in SIGNIFICANT_DOMAINS ] - oldest_ts: float | None = None + run_start_ts: float | None = None if include_start_time_state and not ( - oldest_ts := _get_oldest_possible_ts(hass, start_time) + 
run_start_ts := _get_run_start_ts_for_utc_point_in_time(hass, start_time) ): include_start_time_state = False start_time_ts = dt_util.utc_to_timestamp(start_time) @@ -263,7 +264,7 @@ def get_significant_states_with_session( significant_changes_only, no_attributes, include_start_time_state, - oldest_ts, + run_start_ts, ), track_on=[ bool(single_metadata_id), @@ -410,9 +411,9 @@ def state_changes_during_period( entity_id_to_metadata_id: dict[str, int | None] = { entity_id: single_metadata_id } - oldest_ts: float | None = None + run_start_ts: float | None = None if include_start_time_state and not ( - oldest_ts := _get_oldest_possible_ts(hass, start_time) + run_start_ts := _get_run_start_ts_for_utc_point_in_time(hass, start_time) ): include_start_time_state = False start_time_ts = dt_util.utc_to_timestamp(start_time) @@ -425,7 +426,7 @@ def state_changes_during_period( no_attributes, limit, include_start_time_state, - oldest_ts, + run_start_ts, has_last_reported, ), track_on=[ @@ -599,17 +600,17 @@ def _get_start_time_state_for_entities_stmt( ) -def _get_oldest_possible_ts( +def _get_run_start_ts_for_utc_point_in_time( hass: HomeAssistant, utc_point_in_time: datetime ) -> float | None: - """Return the oldest possible timestamp. - - Returns None if there are no states as old as utc_point_in_time. 
- """ - - oldest_ts = get_instance(hass).states_manager.oldest_ts - if oldest_ts is not None and oldest_ts < utc_point_in_time.timestamp(): - return oldest_ts + """Return the start time of a run.""" + run = get_instance(hass).recorder_runs_manager.get(utc_point_in_time) + if ( + run is not None + and (run_start := process_timestamp(run.start)) < utc_point_in_time + ): + return run_start.timestamp() + # History did not run before utc_point_in_time but we still return None diff --git a/homeassistant/components/recorder/purge.py b/homeassistant/components/recorder/purge.py index 28a5a2ed32d..329f48e5455 100644 --- a/homeassistant/components/recorder/purge.py +++ b/homeassistant/components/recorder/purge.py @@ -123,9 +123,6 @@ def purge_old_data( _purge_old_entity_ids(instance, session) _purge_old_recorder_runs(instance, session, purge_before) - with session_scope(session=instance.get_session(), read_only=True) as session: - instance.recorder_runs_manager.load_from_db(session) - instance.states_manager.load_from_db(session) if repack: repack_database(instance) return True diff --git a/homeassistant/components/recorder/queries.py b/homeassistant/components/recorder/queries.py index 8ca7bef2691..2e4b588a0b0 100644 --- a/homeassistant/components/recorder/queries.py +++ b/homeassistant/components/recorder/queries.py @@ -637,15 +637,6 @@ def find_states_to_purge( ) -def find_oldest_state() -> StatementLambdaElement: - """Find the last_updated_ts of the oldest state.""" - return lambda_stmt( - lambda: select(States.last_updated_ts).where( - States.state_id.in_(select(func.min(States.state_id))) - ) - ) - - def find_short_term_statistics_to_purge( purge_before: datetime, max_bind_vars: int ) -> StatementLambdaElement: diff --git a/homeassistant/components/recorder/table_managers/states.py b/homeassistant/components/recorder/table_managers/states.py index fafcfa0ea61..d5cef759c54 100644 --- a/homeassistant/components/recorder/table_managers/states.py +++ 
b/homeassistant/components/recorder/table_managers/states.py @@ -2,15 +2,7 @@ from __future__ import annotations -from collections.abc import Sequence -from typing import Any, cast - -from sqlalchemy.engine.row import Row -from sqlalchemy.orm.session import Session - from ..db_schema import States -from ..queries import find_oldest_state -from ..util import execute_stmt_lambda_element class StatesManager: @@ -21,12 +13,6 @@ class StatesManager: self._pending: dict[str, States] = {} self._last_committed_id: dict[str, int] = {} self._last_reported: dict[int, float] = {} - self._oldest_ts: float | None = None - - @property - def oldest_ts(self) -> float | None: - """Return the oldest timestamp.""" - return self._oldest_ts def pop_pending(self, entity_id: str) -> States | None: """Pop a pending state. @@ -58,8 +44,6 @@ class StatesManager: recorder thread. """ self._pending[entity_id] = state - if self._oldest_ts is None: - self._oldest_ts = state.last_updated_ts def update_pending_last_reported( self, state_id: int, last_reported_timestamp: float @@ -90,22 +74,6 @@ class StatesManager: """ self._last_committed_id.clear() self._pending.clear() - self._oldest_ts = None - - def load_from_db(self, session: Session) -> None: - """Update the cache. - - Must run in the recorder thread. - """ - result = cast( - Sequence[Row[Any]], - execute_stmt_lambda_element(session, find_oldest_state()), - ) - if not result: - ts = None - else: - ts = result[0].last_updated_ts - self._oldest_ts = ts def evict_purged_state_ids(self, purged_state_ids: set[int]) -> None: """Evict purged states from the committed states. 
diff --git a/homeassistant/components/recorder/tasks.py b/homeassistant/components/recorder/tasks.py index fa10c12aa68..783f0a80b8e 100644 --- a/homeassistant/components/recorder/tasks.py +++ b/homeassistant/components/recorder/tasks.py @@ -120,6 +120,8 @@ class PurgeTask(RecorderTask): if purge.purge_old_data( instance, self.purge_before, self.repack, self.apply_filter ): + with instance.get_session() as session: + instance.recorder_runs_manager.load_from_db(session) # We always need to do the db cleanups after a purge # is finished to ensure the WAL checkpoint and other # tasks happen after a vacuum. diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index f721a260c14..ca160e5201b 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -112,9 +112,6 @@ async def test_purge_big_database(hass: HomeAssistant, recorder_mock: Recorder) async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test deleting old states.""" - assert recorder_mock.states_manager.oldest_ts is None - oldest_ts = recorder_mock.states_manager.oldest_ts - await _add_test_states(hass) # make sure we start with 6 states @@ -130,10 +127,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> events = session.query(Events).filter(Events.event_type == "state_changed") assert events.count() == 0 - assert recorder_mock.states_manager.oldest_ts != oldest_ts - assert recorder_mock.states_manager.oldest_ts == states[0].last_updated_ts - oldest_ts = recorder_mock.states_manager.oldest_ts - assert "test.recorder2" in recorder_mock.states_manager._last_committed_id purge_before = dt_util.utcnow() - timedelta(days=4) @@ -147,8 +140,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> repack=False, ) assert not finished - # states_manager.oldest_ts is not updated until after the purge is complete - assert 
recorder_mock.states_manager.oldest_ts == oldest_ts with session_scope(hass=hass) as session: states = session.query(States) @@ -171,8 +162,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> finished = purge_old_data(recorder_mock, purge_before, repack=False) assert finished - # states_manager.oldest_ts should now be updated - assert recorder_mock.states_manager.oldest_ts != oldest_ts with session_scope(hass=hass) as session: states = session.query(States) @@ -180,10 +169,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> assert states.count() == 2 assert state_attributes.count() == 1 - assert recorder_mock.states_manager.oldest_ts != oldest_ts - assert recorder_mock.states_manager.oldest_ts == states[0].last_updated_ts - oldest_ts = recorder_mock.states_manager.oldest_ts - assert "test.recorder2" in recorder_mock.states_manager._last_committed_id # run purge_old_data again @@ -196,8 +181,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> repack=False, ) assert not finished - # states_manager.oldest_ts is not updated until after the purge is complete - assert recorder_mock.states_manager.oldest_ts == oldest_ts with session_scope(hass=hass) as session: assert states.count() == 0 From e93256951efa7b908e7accb54588cbd2d178f356 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Sat, 14 Dec 2024 20:51:30 +0100 Subject: [PATCH 347/677] Bump incomfort-client to v0.6.4 (#133205) --- homeassistant/components/incomfort/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/incomfort/manifest.json b/homeassistant/components/incomfort/manifest.json index 40c93012eef..f404f33b970 100644 --- a/homeassistant/components/incomfort/manifest.json +++ b/homeassistant/components/incomfort/manifest.json @@ -6,5 +6,5 @@ "documentation": 
"https://www.home-assistant.io/integrations/incomfort", "iot_class": "local_polling", "loggers": ["incomfortclient"], - "requirements": ["incomfort-client==0.6.3-1"] + "requirements": ["incomfort-client==0.6.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index 6929deaf14e..bf353472fba 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1192,7 +1192,7 @@ ihcsdk==2.8.5 imgw_pib==1.0.6 # homeassistant.components.incomfort -incomfort-client==0.6.3-1 +incomfort-client==0.6.4 # homeassistant.components.influxdb influxdb-client==1.24.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 83070eb8030..2b851df1979 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1003,7 +1003,7 @@ ifaddr==0.2.0 imgw_pib==1.0.6 # homeassistant.components.incomfort -incomfort-client==0.6.3-1 +incomfort-client==0.6.4 # homeassistant.components.influxdb influxdb-client==1.24.0 From eb86b00dd40fa4f0009b15464128dcd4a2268705 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 14 Dec 2024 15:06:26 -0600 Subject: [PATCH 348/677] Bump yalexs-ble to 2.5.5 (#133229) changelog: https://github.com/bdraco/yalexs-ble/compare/v2.5.4...v2.5.5 --- homeassistant/components/august/manifest.json | 2 +- homeassistant/components/yale/manifest.json | 2 +- homeassistant/components/yalexs_ble/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index ed2c8007ee8..d0b41411c96 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.4"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.5"] } diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json index 2ed1f4b5c43..7b7edfac77b 100644 --- a/homeassistant/components/yale/manifest.json +++ b/homeassistant/components/yale/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale", "iot_class": "cloud_push", "loggers": ["socketio", "engineio", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.4"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.5"] } diff --git a/homeassistant/components/yalexs_ble/manifest.json b/homeassistant/components/yalexs_ble/manifest.json index 1472f9035ea..b2c331397b3 100644 --- a/homeassistant/components/yalexs_ble/manifest.json +++ b/homeassistant/components/yalexs_ble/manifest.json @@ -12,5 +12,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/yalexs_ble", "iot_class": "local_push", - "requirements": ["yalexs-ble==2.5.4"] + "requirements": ["yalexs-ble==2.5.5"] } 
diff --git a/requirements_all.txt b/requirements_all.txt index bf353472fba..9c69b883136 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3044,7 +3044,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.4 +yalexs-ble==2.5.5 # homeassistant.components.august # homeassistant.components.yale diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2b851df1979..3205ed5a290 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2433,7 +2433,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.4 +yalexs-ble==2.5.5 # homeassistant.components.august # homeassistant.components.yale From a48a5adc81aea979f64a0c8460fd9b84746abc61 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 15 Dec 2024 12:28:29 -0600 Subject: [PATCH 349/677] Set code_arm_required to False for homekit_controller (#133284) --- .../components/homekit_controller/alarm_control_panel.py | 1 + tests/components/homekit_controller/snapshots/test_init.ambr | 4 ++-- .../components/homekit_controller/test_alarm_control_panel.py | 2 ++ 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/homekit_controller/alarm_control_panel.py b/homeassistant/components/homekit_controller/alarm_control_panel.py index 3cb80f2c817..b17f122dfa5 100644 --- a/homeassistant/components/homekit_controller/alarm_control_panel.py +++ b/homeassistant/components/homekit_controller/alarm_control_panel.py @@ -69,6 +69,7 @@ class HomeKitAlarmControlPanelEntity(HomeKitEntity, AlarmControlPanelEntity): | AlarmControlPanelEntityFeature.ARM_AWAY | AlarmControlPanelEntityFeature.ARM_NIGHT ) + _attr_code_arm_required = False def get_characteristic_types(self) -> list[str]: """Define the homekit characteristics the entity cares about.""" diff --git 
a/tests/components/homekit_controller/snapshots/test_init.ambr b/tests/components/homekit_controller/snapshots/test_init.ambr index b96da507adf..2bd5e7faf75 100644 --- a/tests/components/homekit_controller/snapshots/test_init.ambr +++ b/tests/components/homekit_controller/snapshots/test_init.ambr @@ -1474,7 +1474,7 @@ 'state': dict({ 'attributes': dict({ 'changed_by': None, - 'code_arm_required': True, + 'code_arm_required': False, 'code_format': None, 'friendly_name': 'Aqara-Hub-E1-00A0 Security System', 'supported_features': , @@ -1848,7 +1848,7 @@ 'state': dict({ 'attributes': dict({ 'changed_by': None, - 'code_arm_required': True, + 'code_arm_required': False, 'code_format': None, 'friendly_name': 'Aqara Hub-1563 Security System', 'supported_features': , diff --git a/tests/components/homekit_controller/test_alarm_control_panel.py b/tests/components/homekit_controller/test_alarm_control_panel.py index 1e9f023fc46..3ab9dc82e41 100644 --- a/tests/components/homekit_controller/test_alarm_control_panel.py +++ b/tests/components/homekit_controller/test_alarm_control_panel.py @@ -6,6 +6,7 @@ from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes +from homeassistant.components.alarm_control_panel import ATTR_CODE_ARM_REQUIRED from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -106,6 +107,7 @@ async def test_switch_read_alarm_state( state = await helper.poll_and_get_state() assert state.state == "armed_home" assert state.attributes["battery_level"] == 50 + assert state.attributes[ATTR_CODE_ARM_REQUIRED] is False await helper.async_update( ServicesTypes.SECURITY_SYSTEM, From 97f22b3a3d04a50d8f9496a163191b04d58dd690 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Sun, 15 Dec 2024 19:26:46 +0100 Subject: [PATCH 350/677] Allow load_verify_locations with only cadata passed (#133299) 
--- homeassistant/block_async_io.py | 8 +++++++- tests/test_block_async_io.py | 6 ++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/homeassistant/block_async_io.py b/homeassistant/block_async_io.py index 7a68b2515e9..767716dbe27 100644 --- a/homeassistant/block_async_io.py +++ b/homeassistant/block_async_io.py @@ -50,6 +50,12 @@ def _check_sleep_call_allowed(mapped_args: dict[str, Any]) -> bool: return False +def _check_load_verify_locations_call_allowed(mapped_args: dict[str, Any]) -> bool: + # If only cadata is passed, we can ignore it + kwargs = mapped_args.get("kwargs") + return bool(kwargs and len(kwargs) == 1 and "cadata" in kwargs) + + @dataclass(slots=True, frozen=True) class BlockingCall: """Class to hold information about a blocking call.""" @@ -158,7 +164,7 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = ( original_func=SSLContext.load_verify_locations, object=SSLContext, function="load_verify_locations", - check_allowed=None, + check_allowed=_check_load_verify_locations_call_allowed, strict=False, strict_core=False, skip_for_tests=True, diff --git a/tests/test_block_async_io.py b/tests/test_block_async_io.py index dc2b096f595..dd23d4e9709 100644 --- a/tests/test_block_async_io.py +++ b/tests/test_block_async_io.py @@ -429,6 +429,12 @@ async def test_protect_loop_load_verify_locations( context.load_verify_locations("/dev/null") assert "Detected blocking call to load_verify_locations" in caplog.text + # ignore with only cadata + caplog.clear() + with pytest.raises(ssl.SSLError): + context.load_verify_locations(cadata="xxx") + assert "Detected blocking call to load_verify_locations" not in caplog.text + async def test_protect_loop_load_cert_chain( hass: HomeAssistant, caplog: pytest.LogCaptureFixture From 2bc917c8426ddf87b4197b256b20f8b9a5c4fd38 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Mon, 16 Dec 2024 18:06:06 +0000 Subject: [PATCH 351/677] Bump `imgw-pib` to version 1.0.7 (#133364) --- 
homeassistant/components/imgw_pib/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/imgw_pib/manifest.json b/homeassistant/components/imgw_pib/manifest.json index b5c35f3f1eb..ce3bc14d37b 100644 --- a/homeassistant/components/imgw_pib/manifest.json +++ b/homeassistant/components/imgw_pib/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/imgw_pib", "iot_class": "cloud_polling", - "requirements": ["imgw_pib==1.0.6"] + "requirements": ["imgw_pib==1.0.7"] } diff --git a/requirements_all.txt b/requirements_all.txt index 9c69b883136..3ec404c8490 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1189,7 +1189,7 @@ iglo==1.2.7 ihcsdk==2.8.5 # homeassistant.components.imgw_pib -imgw_pib==1.0.6 +imgw_pib==1.0.7 # homeassistant.components.incomfort incomfort-client==0.6.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3205ed5a290..26966480444 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1000,7 +1000,7 @@ idasen-ha==2.6.2 ifaddr==0.2.0 # homeassistant.components.imgw_pib -imgw_pib==1.0.6 +imgw_pib==1.0.7 # homeassistant.components.incomfort incomfort-client==0.6.4 From a56ad0273b3c29e54d1d96b301ab870a34ee9db9 Mon Sep 17 00:00:00 2001 From: Jonas Fors Lellky Date: Tue, 17 Dec 2024 11:36:45 +0100 Subject: [PATCH 352/677] Fix fan setpoints for flexit_bacnet (#133388) --- .../components/flexit_bacnet/number.py | 52 ++++++++------ tests/components/flexit_bacnet/conftest.py | 20 +++--- .../flexit_bacnet/snapshots/test_number.ambr | 68 +++++++++---------- tests/components/flexit_bacnet/test_number.py | 8 +-- 4 files changed, 80 insertions(+), 68 deletions(-) diff --git a/homeassistant/components/flexit_bacnet/number.py b/homeassistant/components/flexit_bacnet/number.py index 6e6e2eea980..029ce896445 100644 --- 
a/homeassistant/components/flexit_bacnet/number.py +++ b/homeassistant/components/flexit_bacnet/number.py @@ -29,6 +29,8 @@ class FlexitNumberEntityDescription(NumberEntityDescription): """Describes a Flexit number entity.""" native_value_fn: Callable[[FlexitBACnet], float] + native_max_value_fn: Callable[[FlexitBACnet], int] + native_min_value_fn: Callable[[FlexitBACnet], int] set_native_value_fn: Callable[[FlexitBACnet], Callable[[int], Awaitable[None]]] @@ -37,121 +39,121 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] = ( key="away_extract_fan_setpoint", translation_key="away_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_extract_air_away, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_away, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda device: int(device.fan_setpoint_extract_air_home), + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="away_supply_fan_setpoint", translation_key="away_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_away, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_away, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda device: int(device.fan_setpoint_supply_air_home), + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="cooker_hood_extract_fan_setpoint", translation_key="cooker_hood_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_extract_air_cooker, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_cooker, 
native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="cooker_hood_supply_fan_setpoint", translation_key="cooker_hood_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_cooker, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_cooker, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="fireplace_extract_fan_setpoint", translation_key="fireplace_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_extract_air_fire, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_fire, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="fireplace_supply_fan_setpoint", translation_key="fireplace_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_fire, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_fire, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda _: 30, ), FlexitNumberEntityDescription( key="high_extract_fan_setpoint", translation_key="high_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_extract_air_high, set_native_value_fn=lambda device: 
device.set_fan_setpoint_extract_air_high, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda device: int(device.fan_setpoint_extract_air_home), ), FlexitNumberEntityDescription( key="high_supply_fan_setpoint", translation_key="high_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_high, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_high, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda device: int(device.fan_setpoint_supply_air_home), ), FlexitNumberEntityDescription( key="home_extract_fan_setpoint", translation_key="home_extract_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_extract_air_home, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_home, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda device: int(device.fan_setpoint_extract_air_away), ), FlexitNumberEntityDescription( key="home_supply_fan_setpoint", translation_key="home_supply_fan_setpoint", device_class=NumberDeviceClass.POWER_FACTOR, - native_min_value=0, - native_max_value=100, native_step=1, mode=NumberMode.SLIDER, native_value_fn=lambda device: device.fan_setpoint_supply_air_home, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_home, native_unit_of_measurement=PERCENTAGE, + native_max_value_fn=lambda _: 100, + native_min_value_fn=lambda device: int(device.fan_setpoint_supply_air_away), ), ) @@ -192,6 +194,16 @@ class FlexitNumber(FlexitEntity, NumberEntity): """Return the state of the number.""" return 
self.entity_description.native_value_fn(self.coordinator.device) + @property + def native_max_value(self) -> float: + """Return the native max value of the number.""" + return self.entity_description.native_max_value_fn(self.coordinator.device) + + @property + def native_min_value(self) -> float: + """Return the native min value of the number.""" + return self.entity_description.native_min_value_fn(self.coordinator.device) + async def async_set_native_value(self, value: float) -> None: """Update the current value.""" set_native_value_fn = self.entity_description.set_native_value_fn( diff --git a/tests/components/flexit_bacnet/conftest.py b/tests/components/flexit_bacnet/conftest.py index cc7c9fa0570..c12559ef3ae 100644 --- a/tests/components/flexit_bacnet/conftest.py +++ b/tests/components/flexit_bacnet/conftest.py @@ -68,16 +68,16 @@ def mock_flexit_bacnet() -> Generator[AsyncMock]: flexit_bacnet.electric_heater = True # Mock fan setpoints - flexit_bacnet.fan_setpoint_extract_air_fire = 10 - flexit_bacnet.fan_setpoint_supply_air_fire = 20 - flexit_bacnet.fan_setpoint_extract_air_away = 30 - flexit_bacnet.fan_setpoint_supply_air_away = 40 - flexit_bacnet.fan_setpoint_extract_air_home = 50 - flexit_bacnet.fan_setpoint_supply_air_home = 60 - flexit_bacnet.fan_setpoint_extract_air_high = 70 - flexit_bacnet.fan_setpoint_supply_air_high = 80 - flexit_bacnet.fan_setpoint_extract_air_cooker = 90 - flexit_bacnet.fan_setpoint_supply_air_cooker = 100 + flexit_bacnet.fan_setpoint_extract_air_fire = 56 + flexit_bacnet.fan_setpoint_supply_air_fire = 77 + flexit_bacnet.fan_setpoint_extract_air_away = 40 + flexit_bacnet.fan_setpoint_supply_air_away = 42 + flexit_bacnet.fan_setpoint_extract_air_home = 70 + flexit_bacnet.fan_setpoint_supply_air_home = 74 + flexit_bacnet.fan_setpoint_extract_air_high = 100 + flexit_bacnet.fan_setpoint_supply_air_high = 100 + flexit_bacnet.fan_setpoint_extract_air_cooker = 50 + flexit_bacnet.fan_setpoint_supply_air_cooker = 70 yield flexit_bacnet 
diff --git a/tests/components/flexit_bacnet/snapshots/test_number.ambr b/tests/components/flexit_bacnet/snapshots/test_number.ambr index c4fb1e7c434..78eefd08345 100644 --- a/tests/components/flexit_bacnet/snapshots/test_number.ambr +++ b/tests/components/flexit_bacnet/snapshots/test_number.ambr @@ -5,8 +5,8 @@ }), 'area_id': None, 'capabilities': dict({ - 'max': 100, - 'min': 0, + 'max': 70, + 'min': 30, 'mode': , 'step': 1, }), @@ -42,8 +42,8 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Device Name Away extract fan setpoint', - 'max': 100, - 'min': 0, + 'max': 70, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -53,7 +53,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '30', + 'state': '40', }) # --- # name: test_numbers[number.device_name_away_supply_fan_setpoint-entry] @@ -62,8 +62,8 @@ }), 'area_id': None, 'capabilities': dict({ - 'max': 100, - 'min': 0, + 'max': 74, + 'min': 30, 'mode': , 'step': 1, }), @@ -99,8 +99,8 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', 'friendly_name': 'Device Name Away supply fan setpoint', - 'max': 100, - 'min': 0, + 'max': 74, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -110,7 +110,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '40', + 'state': '42', }) # --- # name: test_numbers[number.device_name_cooker_hood_extract_fan_setpoint-entry] @@ -120,7 +120,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, }), @@ -157,7 +157,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Cooker hood extract fan setpoint', 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -167,7 +167,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '90', + 'state': '50', }) # --- # name: test_numbers[number.device_name_cooker_hood_supply_fan_setpoint-entry] @@ -177,7 +177,7 @@ 'area_id': None, 
'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, }), @@ -214,7 +214,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Cooker hood supply fan setpoint', 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -224,7 +224,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100', + 'state': '70', }) # --- # name: test_numbers[number.device_name_fireplace_extract_fan_setpoint-entry] @@ -234,7 +234,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, }), @@ -271,7 +271,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Fireplace extract fan setpoint', 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -281,7 +281,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '10', + 'state': '56', }) # --- # name: test_numbers[number.device_name_fireplace_supply_fan_setpoint-entry] @@ -291,7 +291,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, }), @@ -328,7 +328,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Fireplace supply fan setpoint', 'max': 100, - 'min': 0, + 'min': 30, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -338,7 +338,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '20', + 'state': '77', }) # --- # name: test_numbers[number.device_name_high_extract_fan_setpoint-entry] @@ -348,7 +348,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 70, 'mode': , 'step': 1, }), @@ -385,7 +385,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name High extract fan setpoint', 'max': 100, - 'min': 0, + 'min': 70, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -395,7 +395,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '70', + 'state': '100', }) # --- # name: 
test_numbers[number.device_name_high_supply_fan_setpoint-entry] @@ -405,7 +405,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 74, 'mode': , 'step': 1, }), @@ -442,7 +442,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name High supply fan setpoint', 'max': 100, - 'min': 0, + 'min': 74, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -452,7 +452,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '80', + 'state': '100', }) # --- # name: test_numbers[number.device_name_home_extract_fan_setpoint-entry] @@ -462,7 +462,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 40, 'mode': , 'step': 1, }), @@ -499,7 +499,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Home extract fan setpoint', 'max': 100, - 'min': 0, + 'min': 40, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -509,7 +509,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '50', + 'state': '70', }) # --- # name: test_numbers[number.device_name_home_supply_fan_setpoint-entry] @@ -519,7 +519,7 @@ 'area_id': None, 'capabilities': dict({ 'max': 100, - 'min': 0, + 'min': 42, 'mode': , 'step': 1, }), @@ -556,7 +556,7 @@ 'device_class': 'power_factor', 'friendly_name': 'Device Name Home supply fan setpoint', 'max': 100, - 'min': 0, + 'min': 42, 'mode': , 'step': 1, 'unit_of_measurement': '%', @@ -566,6 +566,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '60', + 'state': '74', }) # --- diff --git a/tests/components/flexit_bacnet/test_number.py b/tests/components/flexit_bacnet/test_number.py index ad49908fa96..f566b623f12 100644 --- a/tests/components/flexit_bacnet/test_number.py +++ b/tests/components/flexit_bacnet/test_number.py @@ -64,21 +64,21 @@ async def test_numbers_implementation( assert len(mocked_method.mock_calls) == 1 assert hass.states.get(ENTITY_ID).state == "60" - mock_flexit_bacnet.fan_setpoint_supply_air_fire = 10 + 
mock_flexit_bacnet.fan_setpoint_supply_air_fire = 40 await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, { ATTR_ENTITY_ID: ENTITY_ID, - ATTR_VALUE: 10, + ATTR_VALUE: 40, }, blocking=True, ) mocked_method = getattr(mock_flexit_bacnet, "set_fan_setpoint_supply_air_fire") assert len(mocked_method.mock_calls) == 2 - assert hass.states.get(ENTITY_ID).state == "10" + assert hass.states.get(ENTITY_ID).state == "40" # Error recovery, when setting the value mock_flexit_bacnet.set_fan_setpoint_supply_air_fire.side_effect = DecodingError @@ -89,7 +89,7 @@ async def test_numbers_implementation( SERVICE_SET_VALUE, { ATTR_ENTITY_ID: ENTITY_ID, - ATTR_VALUE: 10, + ATTR_VALUE: 40, }, blocking=True, ) From b4015805f7fd0408aa4e047769de69e5c6e74651 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 17 Dec 2024 11:10:38 +0100 Subject: [PATCH 353/677] Bump holidays to 0.63 (#133391) --- homeassistant/components/holiday/manifest.json | 2 +- homeassistant/components/workday/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/holiday/manifest.json b/homeassistant/components/holiday/manifest.json index 7edc140da11..33cae231595 100644 --- a/homeassistant/components/holiday/manifest.json +++ b/homeassistant/components/holiday/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/holiday", "iot_class": "local_polling", - "requirements": ["holidays==0.62", "babel==2.15.0"] + "requirements": ["holidays==0.63", "babel==2.15.0"] } diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index 842c6f1f1ad..de9cbe694d8 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": 
["holidays==0.62"] + "requirements": ["holidays==0.63"] } diff --git a/requirements_all.txt b/requirements_all.txt index 3ec404c8490..2858c92d182 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1127,7 +1127,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.62 +holidays==0.63 # homeassistant.components.frontend home-assistant-frontend==20241127.8 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 26966480444..f8565afc4b6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -953,7 +953,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.62 +holidays==0.63 # homeassistant.components.frontend home-assistant-frontend==20241127.8 From 517f3faa0ac8697b53a36c9b8849509db4fd2ebd Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 17 Dec 2024 12:14:26 +0000 Subject: [PATCH 354/677] Bump version to 2024.12.4 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 391a02d07b4..21f805bae72 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -25,7 +25,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 12 -PATCH_VERSION: Final = "3" +PATCH_VERSION: Final = "4" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index ef8ce79f894..6b640bce4d0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.12.3" +version = "2024.12.4" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From a4588c80d56adef47ef511ebecdc310cc52a3211 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 17 Dec 2024 13:18:26 +0100 Subject: [PATCH 355/677] Bump aiohasupervisor to version 0.2.2b2 (#133417) * Bump aiohasupervisor to version 0.2.2b2 * Update test --- homeassistant/components/hassio/backup.py | 2 +- homeassistant/components/hassio/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/hassio/test_backup.py | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py index 0353255fe7b..34c0701fdc4 100644 --- a/homeassistant/components/hassio/backup.py +++ b/homeassistant/components/hassio/backup.py @@ -211,7 +211,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): for agent_id in agent_ids if manager.backup_agents[agent_id].domain == DOMAIN ] - locations = {agent.location for agent in hassio_agents} + locations = [agent.location for agent in hassio_agents] backup = await self._client.backups.partial_backup( supervisor_backups.PartialBackupOptions( diff --git a/homeassistant/components/hassio/manifest.json b/homeassistant/components/hassio/manifest.json index 8fe124e763c..70230701965 100644 --- a/homeassistant/components/hassio/manifest.json +++ b/homeassistant/components/hassio/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/hassio", "iot_class": "local_polling", "quality_scale": "internal", - "requirements": ["aiohasupervisor==0.2.2b0"], + "requirements": ["aiohasupervisor==0.2.2b2"], "single_config_entry": true } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 65a6890024f..add20ef0870 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -3,7 +3,7 @@ 
aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 -aiohasupervisor==0.2.2b0 +aiohasupervisor==0.2.2b2 aiohttp-fast-zlib==0.2.0 aiohttp==3.11.10 aiohttp_cors==0.7.0 diff --git a/pyproject.toml b/pyproject.toml index 2930d381d2a..91acea30b52 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ dependencies = [ # Integrations may depend on hassio integration without listing it to # change behavior based on presence of supervisor. Deprecated with #127228 # Lib can be removed with 2025.11 - "aiohasupervisor==0.2.2b0", + "aiohasupervisor==0.2.2b2", "aiohttp==3.11.10", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.2.0", diff --git a/requirements.txt b/requirements.txt index e80804569d3..e4346c3e517 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # Home Assistant Core aiodns==3.2.0 -aiohasupervisor==0.2.2b0 +aiohasupervisor==0.2.2b2 aiohttp==3.11.10 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index f2ab0a938d9..2540a297334 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -261,7 +261,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.2b0 +aiohasupervisor==0.2.2b2 # homeassistant.components.homekit_controller aiohomekit==3.2.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c6631388041..fe528899ad3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -246,7 +246,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.2b0 +aiohasupervisor==0.2.2b2 # homeassistant.components.homekit_controller aiohomekit==3.2.7 diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index 3e928bc996b..ab708438e51 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -332,7 +332,7 @@ async def test_reader_writer_create( folders=None, 
homeassistant_exclude_database=False, homeassistant=True, - location={None}, + location=[None], name="Test", password=None, ) From 89946348df69b607edc920d7e33b471c7169ec1f Mon Sep 17 00:00:00 2001 From: Cyrill Raccaud Date: Tue, 17 Dec 2024 13:54:07 +0100 Subject: [PATCH 356/677] Add reconfigure to Cookidoo integration (#133144) * add reconfigure * merge steps * comments --- .../components/cookidoo/config_flow.py | 75 +++++++-- .../components/cookidoo/quality_scale.yaml | 2 +- .../components/cookidoo/strings.json | 6 +- tests/components/cookidoo/test_config_flow.py | 158 ++++++++++++++++++ 4 files changed, 221 insertions(+), 20 deletions(-) diff --git a/homeassistant/components/cookidoo/config_flow.py b/homeassistant/components/cookidoo/config_flow.py index 58e99a70907..120ab162a6c 100644 --- a/homeassistant/components/cookidoo/config_flow.py +++ b/homeassistant/components/cookidoo/config_flow.py @@ -17,7 +17,12 @@ from cookidoo_api import ( ) import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, + SOURCE_USER, + ConfigFlow, + ConfigFlowResult, +) from homeassistant.const import CONF_COUNTRY, CONF_EMAIL, CONF_LANGUAGE, CONF_PASSWORD from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.selector import ( @@ -58,26 +63,43 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN): user_input: dict[str, Any] - async def async_step_user( - self, user_input: dict[str, Any] | None = None + async def async_step_reconfigure( + self, user_input: dict[str, Any] ) -> ConfigFlowResult: - """Handle the user step.""" + """Perform reconfigure upon an user action.""" + return await self.async_step_user(user_input) + + async def async_step_user( + self, + user_input: dict[str, Any] | None = None, + ) -> ConfigFlowResult: + """Handle the user step as well as serve for reconfiguration.""" errors: dict[str, str] = {} if user_input is not 
None and not ( errors := await self.validate_input(user_input) ): - self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]}) + if self.source == SOURCE_USER: + self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]}) self.user_input = user_input return await self.async_step_language() await self.generate_country_schema() + suggested_values: dict = {} + if self.source == SOURCE_RECONFIGURE: + reconfigure_entry = self._get_reconfigure_entry() + suggested_values = { + **suggested_values, + **reconfigure_entry.data, + } + if user_input is not None: + suggested_values = {**suggested_values, **user_input} return self.async_show_form( step_id="user", data_schema=self.add_suggested_values_to_schema( data_schema=vol.Schema( {**AUTH_DATA_SCHEMA, **self.COUNTRY_DATA_SCHEMA} ), - suggested_values=user_input, + suggested_values=suggested_values, ), description_placeholders={"cookidoo": "Cookidoo"}, errors=errors, @@ -92,8 +114,18 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN): if language_input is not None and not ( errors := await self.validate_input(self.user_input, language_input) ): - return self.async_create_entry( - title="Cookidoo", data={**self.user_input, **language_input} + if self.source == SOURCE_USER: + return self.async_create_entry( + title="Cookidoo", data={**self.user_input, **language_input} + ) + reconfigure_entry = self._get_reconfigure_entry() + return self.async_update_reload_and_abort( + reconfigure_entry, + data={ + **reconfigure_entry.data, + **self.user_input, + **language_input, + }, ) await self.generate_language_schema() @@ -169,24 +201,35 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN): async def validate_input( self, - user_input: Mapping[str, Any], - language_input: Mapping[str, Any] | None = None, + user_input: dict[str, Any], + language_input: dict[str, Any] | None = None, ) -> dict[str, str]: """Input Helper.""" errors: dict[str, str] = {} + data_input: dict[str, Any] = {} + + if self.source == 
SOURCE_RECONFIGURE: + reconfigure_entry = self._get_reconfigure_entry() + data_input = {**data_input, **reconfigure_entry.data} + data_input = {**data_input, **user_input} + if language_input: + data_input = {**data_input, **language_input} + else: + data_input[CONF_LANGUAGE] = ( + await get_localization_options(country=data_input[CONF_COUNTRY].lower()) + )[0] # Pick any language to test login + session = async_get_clientsession(self.hass) cookidoo = Cookidoo( session, CookidooConfig( - email=user_input[CONF_EMAIL], - password=user_input[CONF_PASSWORD], + email=data_input[CONF_EMAIL], + password=data_input[CONF_PASSWORD], localization=CookidooLocalizationConfig( - country_code=user_input[CONF_COUNTRY].lower(), - language=language_input[CONF_LANGUAGE] - if language_input - else "de-ch", + country_code=data_input[CONF_COUNTRY].lower(), + language=data_input[CONF_LANGUAGE], ), ), ) diff --git a/homeassistant/components/cookidoo/quality_scale.yaml b/homeassistant/components/cookidoo/quality_scale.yaml index 25069c87c46..95a35829079 100644 --- a/homeassistant/components/cookidoo/quality_scale.yaml +++ b/homeassistant/components/cookidoo/quality_scale.yaml @@ -66,7 +66,7 @@ rules: diagnostics: todo exception-translations: done icon-translations: done - reconfiguration-flow: todo + reconfiguration-flow: done dynamic-devices: status: exempt comment: No dynamic entities available diff --git a/homeassistant/components/cookidoo/strings.json b/homeassistant/components/cookidoo/strings.json index 19f709ddaf8..14344bed13d 100644 --- a/homeassistant/components/cookidoo/strings.json +++ b/homeassistant/components/cookidoo/strings.json @@ -2,7 +2,7 @@ "config": { "step": { "user": { - "title": "Login to {cookidoo}", + "title": "Setup {cookidoo}", "data": { "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]", @@ -11,11 +11,11 @@ "data_description": { "email": "Email used to access your {cookidoo} account.", "password": 
"Password used to access your {cookidoo} account.", - "country": "Pick your language for the {cookidoo} content." + "country": "Pick your country for the {cookidoo} content." } }, "language": { - "title": "Set language for {cookidoo}", + "title": "Setup {cookidoo}", "data": { "language": "[%key:common::config_flow::data::language%]" }, diff --git a/tests/components/cookidoo/test_config_flow.py b/tests/components/cookidoo/test_config_flow.py index cfdc284dbfe..0057bb3767e 100644 --- a/tests/components/cookidoo/test_config_flow.py +++ b/tests/components/cookidoo/test_config_flow.py @@ -16,6 +16,7 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from .conftest import COUNTRY, EMAIL, LANGUAGE, PASSWORD +from .test_init import setup_integration from tests.common import MockConfigEntry @@ -182,6 +183,163 @@ async def test_flow_user_init_data_already_configured( assert result["reason"] == "already_configured" +async def test_flow_reconfigure_success( + hass: HomeAssistant, + cookidoo_config_entry: AsyncMock, + mock_cookidoo_client: AsyncMock, +) -> None: + """Test we get the reconfigure flow and create entry with success.""" + cookidoo_config_entry.add_to_hass(hass) + await setup_integration(hass, cookidoo_config_entry) + + result = await cookidoo_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["handler"] == "cookidoo" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={**MOCK_DATA_USER_STEP, CONF_COUNTRY: "DE"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "language" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_LANGUAGE: "de-DE"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert cookidoo_config_entry.data == { + 
**MOCK_DATA_USER_STEP, + CONF_COUNTRY: "DE", + CONF_LANGUAGE: "de-DE", + } + assert len(hass.config_entries.async_entries()) == 1 + + +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (CookidooRequestException(), "cannot_connect"), + (CookidooException(), "unknown"), + (IndexError(), "unknown"), + ], +) +async def test_flow_reconfigure_init_data_unknown_error_and_recover_on_step_1( + hass: HomeAssistant, + cookidoo_config_entry: AsyncMock, + mock_cookidoo_client: AsyncMock, + raise_error: Exception, + text_error: str, +) -> None: + """Test unknown errors.""" + mock_cookidoo_client.login.side_effect = raise_error + + cookidoo_config_entry.add_to_hass(hass) + await setup_integration(hass, cookidoo_config_entry) + + result = await cookidoo_config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["handler"] == "cookidoo" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={**MOCK_DATA_USER_STEP, CONF_COUNTRY: "DE"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"]["base"] == text_error + + # Recover + mock_cookidoo_client.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={**MOCK_DATA_USER_STEP, CONF_COUNTRY: "DE"}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "language" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_LANGUAGE: "de-DE"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert cookidoo_config_entry.data == { + **MOCK_DATA_USER_STEP, + CONF_COUNTRY: "DE", + CONF_LANGUAGE: "de-DE", + } + assert len(hass.config_entries.async_entries()) == 1 + + +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (CookidooRequestException(), "cannot_connect"), + 
(CookidooException(), "unknown"), + (IndexError(), "unknown"), + ], +) +async def test_flow_reconfigure_init_data_unknown_error_and_recover_on_step_2( + hass: HomeAssistant, + cookidoo_config_entry: AsyncMock, + mock_cookidoo_client: AsyncMock, + raise_error: Exception, + text_error: str, +) -> None: + """Test unknown errors.""" + mock_cookidoo_client.get_additional_items.side_effect = raise_error + + cookidoo_config_entry.add_to_hass(hass) + await setup_integration(hass, cookidoo_config_entry) + + result = await cookidoo_config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["handler"] == "cookidoo" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={**MOCK_DATA_USER_STEP, CONF_COUNTRY: "DE"}, + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "language" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_LANGUAGE: "de-DE"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"]["base"] == text_error + + # Recover + mock_cookidoo_client.get_additional_items.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_LANGUAGE: "de-DE"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert cookidoo_config_entry.data == { + **MOCK_DATA_USER_STEP, + CONF_COUNTRY: "DE", + CONF_LANGUAGE: "de-DE", + } + assert len(hass.config_entries.async_entries()) == 1 + + async def test_flow_reauth( hass: HomeAssistant, mock_cookidoo_client: AsyncMock, From 8b3cd41396942d1e644374425160b697fb6653a4 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 17 Dec 2024 13:55:04 +0100 Subject: [PATCH 357/677] Improve hassio backup agent test coverage (#133424) --- tests/components/hassio/test_backup.py | 380 ++++++++++++++++++++++--- 
1 file changed, 334 insertions(+), 46 deletions(-) diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index ab708438e51..9995425e6e1 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -1,13 +1,18 @@ """Test supervisor backup functionality.""" from collections.abc import AsyncGenerator, Generator +from dataclasses import replace from datetime import datetime from io import StringIO import os from typing import Any from unittest.mock import AsyncMock, patch -from aiohasupervisor.models import backups as supervisor_backups +from aiohasupervisor.models import ( + backups as supervisor_backups, + mounts as supervisor_mounts, +) +from aiohasupervisor.models.mounts import MountsInfo import pytest from homeassistant.components.backup import ( @@ -67,6 +72,94 @@ TEST_BACKUP_DETAILS = supervisor_backups.BackupComplete( type=TEST_BACKUP.type, ) +TEST_BACKUP_2 = supervisor_backups.Backup( + compressed=False, + content=supervisor_backups.BackupContent( + addons=["ssl"], + folders=["share"], + homeassistant=False, + ), + date=datetime.fromisoformat("1970-01-01T00:00:00Z"), + location=None, + locations={None}, + name="Test", + protected=False, + size=1.0, + size_bytes=1048576, + slug="abc123", + type=supervisor_backups.BackupType.PARTIAL, +) +TEST_BACKUP_DETAILS_2 = supervisor_backups.BackupComplete( + addons=[ + supervisor_backups.BackupAddon( + name="Terminal & SSH", + size=0.0, + slug="core_ssh", + version="9.14.0", + ) + ], + compressed=TEST_BACKUP_2.compressed, + date=TEST_BACKUP_2.date, + extra=None, + folders=["share"], + homeassistant_exclude_database=False, + homeassistant=None, + location=TEST_BACKUP_2.location, + locations=TEST_BACKUP_2.locations, + name=TEST_BACKUP_2.name, + protected=TEST_BACKUP_2.protected, + repositories=[], + size=TEST_BACKUP_2.size, + size_bytes=TEST_BACKUP_2.size_bytes, + slug=TEST_BACKUP_2.slug, + supervisor_version="2024.11.2", + type=TEST_BACKUP_2.type, 
+) + +TEST_BACKUP_3 = supervisor_backups.Backup( + compressed=False, + content=supervisor_backups.BackupContent( + addons=["ssl"], + folders=["share"], + homeassistant=True, + ), + date=datetime.fromisoformat("1970-01-01T00:00:00Z"), + location="share", + locations={"share"}, + name="Test", + protected=False, + size=1.0, + size_bytes=1048576, + slug="abc123", + type=supervisor_backups.BackupType.PARTIAL, +) +TEST_BACKUP_DETAILS_3 = supervisor_backups.BackupComplete( + addons=[ + supervisor_backups.BackupAddon( + name="Terminal & SSH", + size=0.0, + slug="core_ssh", + version="9.14.0", + ) + ], + compressed=TEST_BACKUP_3.compressed, + date=TEST_BACKUP_3.date, + extra=None, + folders=["share"], + homeassistant_exclude_database=False, + homeassistant=None, + location=TEST_BACKUP_3.location, + locations=TEST_BACKUP_3.locations, + name=TEST_BACKUP_3.name, + protected=TEST_BACKUP_3.protected, + repositories=[], + size=TEST_BACKUP_3.size, + size_bytes=TEST_BACKUP_3.size_bytes, + slug=TEST_BACKUP_3.slug, + supervisor_version="2024.11.2", + type=TEST_BACKUP_3.type, +) + @pytest.fixture(autouse=True) def fixture_supervisor_environ() -> Generator[None]: @@ -76,73 +169,160 @@ def fixture_supervisor_environ() -> Generator[None]: @pytest.fixture(autouse=True) -async def setup_integration( +async def hassio_enabled( hass: HomeAssistant, supervisor_client: AsyncMock ) -> AsyncGenerator[None]: - """Set up Backup integration.""" + """Enable hassio.""" with ( patch("homeassistant.components.backup.is_hassio", return_value=True), patch("homeassistant.components.backup.backup.is_hassio", return_value=True), ): - assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}}) - await hass.async_block_till_done() yield +@pytest.fixture +async def setup_integration( + hass: HomeAssistant, hassio_enabled: None, supervisor_client: AsyncMock +) -> AsyncGenerator[None]: + """Set up Backup integration.""" + assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: 
{}}) + await hass.async_block_till_done() + + @pytest.mark.usefixtures("hassio_client") +@pytest.mark.parametrize( + ("mounts", "expected_agents"), + [ + (MountsInfo(default_backup_mount=None, mounts=[]), ["hassio.local"]), + ( + MountsInfo( + default_backup_mount=None, + mounts=[ + supervisor_mounts.CIFSMountResponse( + share="test", + name="test", + read_only=False, + state=supervisor_mounts.MountState.ACTIVE, + user_path="test", + usage=supervisor_mounts.MountUsage.BACKUP, + server="test", + type=supervisor_mounts.MountType.CIFS, + ) + ], + ), + ["hassio.local", "hassio.test"], + ), + ( + MountsInfo( + default_backup_mount=None, + mounts=[ + supervisor_mounts.CIFSMountResponse( + share="test", + name="test", + read_only=False, + state=supervisor_mounts.MountState.ACTIVE, + user_path="test", + usage=supervisor_mounts.MountUsage.MEDIA, + server="test", + type=supervisor_mounts.MountType.CIFS, + ) + ], + ), + ["hassio.local"], + ), + ], +) async def test_agent_info( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + mounts: MountsInfo, + expected_agents: list[str], ) -> None: """Test backup agent info.""" client = await hass_ws_client(hass) + supervisor_client.mounts.info.return_value = mounts + + assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}}) await client.send_json_auto_id({"type": "backup/agents/info"}) response = await client.receive_json() assert response["success"] assert response["result"] == { - "agents": [{"agent_id": "hassio.local"}], + "agents": [{"agent_id": agent_id} for agent_id in expected_agents], } -@pytest.mark.usefixtures("hassio_client") +@pytest.mark.usefixtures("hassio_client", "setup_integration") +@pytest.mark.parametrize( + ("backup", "backup_details", "expected_response"), + [ + ( + TEST_BACKUP, + TEST_BACKUP_DETAILS, + { + "addons": [ + {"name": "Terminal & SSH", "slug": "core_ssh", "version": "9.14.0"} + ], + "agent_ids": ["hassio.local"], + "backup_id": "abc123", + 
"database_included": True, + "date": "1970-01-01T00:00:00+00:00", + "failed_agent_ids": [], + "folders": ["share"], + "homeassistant_included": True, + "homeassistant_version": "2024.12.0", + "name": "Test", + "protected": False, + "size": 1048576, + "with_strategy_settings": False, + }, + ), + ( + TEST_BACKUP_2, + TEST_BACKUP_DETAILS_2, + { + "addons": [ + {"name": "Terminal & SSH", "slug": "core_ssh", "version": "9.14.0"} + ], + "agent_ids": ["hassio.local"], + "backup_id": "abc123", + "database_included": False, + "date": "1970-01-01T00:00:00+00:00", + "failed_agent_ids": [], + "folders": ["share"], + "homeassistant_included": False, + "homeassistant_version": None, + "name": "Test", + "protected": False, + "size": 1048576, + "with_strategy_settings": False, + }, + ), + ], +) async def test_agent_list_backups( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, supervisor_client: AsyncMock, + backup: supervisor_backups.Backup, + backup_details: supervisor_backups.BackupComplete, + expected_response: dict[str, Any], ) -> None: """Test agent list backups.""" client = await hass_ws_client(hass) - supervisor_client.backups.list.return_value = [TEST_BACKUP] - supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + supervisor_client.backups.list.return_value = [backup, TEST_BACKUP_3] + supervisor_client.backups.backup_info.return_value = backup_details await client.send_json_auto_id({"type": "backup/info"}) response = await client.receive_json() assert response["success"] - assert response["result"]["backups"] == [ - { - "addons": [ - {"name": "Terminal & SSH", "slug": "core_ssh", "version": "9.14.0"} - ], - "agent_ids": ["hassio.local"], - "backup_id": "abc123", - "database_included": True, - "date": "1970-01-01T00:00:00+00:00", - "failed_agent_ids": [], - "folders": ["share"], - "homeassistant_included": True, - "homeassistant_version": "2024.12.0", - "name": "Test", - "protected": False, - "size": 1048576, - "with_strategy_settings": False, 
- } - ] + assert response["result"]["backups"] == [expected_response] -@pytest.mark.usefixtures("hassio_client") +@pytest.mark.usefixtures("hassio_client", "setup_integration") async def test_agent_download( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -162,7 +342,26 @@ async def test_agent_download( assert await resp.content.read() == b"backup data" -@pytest.mark.usefixtures("hassio_client") +@pytest.mark.usefixtures("hassio_client", "setup_integration") +async def test_agent_download_unavailable_backup( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test agent download backup, when cloud user is logged in.""" + client = await hass_client() + backup_id = "abc123" + supervisor_client.backups.list.return_value = [TEST_BACKUP_3] + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS_3 + supervisor_client.backups.download_backup.return_value.__aiter__.return_value = ( + iter((b"backup data",)) + ) + + resp = await client.get(f"/api/backup/download/{backup_id}?agent_id=hassio.local") + assert resp.status == 404 + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") async def test_agent_upload( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -208,7 +407,7 @@ async def test_agent_upload( supervisor_client.backups.reload.assert_not_called() -@pytest.mark.usefixtures("hassio_client") +@pytest.mark.usefixtures("hassio_client", "setup_integration") async def test_agent_delete_backup( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -231,7 +430,7 @@ async def test_agent_delete_backup( supervisor_client.backups.remove_backup.assert_called_once_with(backup_id) -@pytest.mark.usefixtures("hassio_client") +@pytest.mark.usefixtures("hassio_client", "setup_integration") @pytest.mark.parametrize( ("event_data", "mount_info_calls"), [ @@ -293,11 +492,55 @@ async def test_agents_notify_on_mount_added_removed( assert 
supervisor_client.mounts.info.call_count == mount_info_calls -@pytest.mark.usefixtures("hassio_client") +DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( + addons=None, + background=True, + compressed=True, + folders=None, + homeassistant_exclude_database=False, + homeassistant=True, + location=[None], + name="Test", + password=None, +) + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") +@pytest.mark.parametrize( + ("extra_generate_options", "expected_supervisor_options"), + [ + ( + {}, + DEFAULT_BACKUP_OPTIONS, + ), + ( + {"include_addons": ["addon_1", "addon_2"]}, + replace(DEFAULT_BACKUP_OPTIONS, addons={"addon_1", "addon_2"}), + ), + ( + {"include_all_addons": True}, + DEFAULT_BACKUP_OPTIONS, + ), + ( + {"include_database": False}, + replace(DEFAULT_BACKUP_OPTIONS, homeassistant_exclude_database=True), + ), + ( + {"include_folders": ["media", "share"]}, + replace(DEFAULT_BACKUP_OPTIONS, folders={"media", "share"}), + ), + ( + {"include_folders": ["media"], "include_homeassistant": False}, + replace(DEFAULT_BACKUP_OPTIONS, folders={"media"}, homeassistant=False), + ), + ], +) async def test_reader_writer_create( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, supervisor_client: AsyncMock, + extra_generate_options: dict[str, Any], + expected_supervisor_options: supervisor_backups.PartialBackupOptions, ) -> None: """Test generating a backup.""" client = await hass_ws_client(hass) @@ -312,6 +555,7 @@ async def test_reader_writer_create( await client.send_json_auto_id( {"type": "backup/generate", "agent_ids": ["hassio.local"], "name": "Test"} + | extra_generate_options ) response = await client.receive_json() assert response["event"] == { @@ -325,17 +569,7 @@ async def test_reader_writer_create( assert response["result"] == {"backup_job_id": "abc123"} supervisor_client.backups.partial_backup.assert_called_once_with( - supervisor_backups.PartialBackupOptions( - addons=None, - background=True, - compressed=True, - 
folders=None, - homeassistant_exclude_database=False, - homeassistant=True, - location=[None], - name="Test", - password=None, - ) + expected_supervisor_options ) await client.send_json_auto_id( @@ -365,7 +599,61 @@ async def test_reader_writer_create( } -@pytest.mark.usefixtures("hassio_client") +@pytest.mark.usefixtures("hassio_client", "setup_integration") +@pytest.mark.parametrize( + ("extra_generate_options"), + [ + {"include_homeassistant": False}, + ], +) +async def test_reader_writer_create_wrong_parameters( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + extra_generate_options: dict[str, Any], +) -> None: + """Test generating a backup.""" + client = await hass_ws_client(hass) + supervisor_client.backups.partial_backup.return_value.job_id = "abc123" + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + response = await client.receive_json() + assert response["success"] + + await client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": ["hassio.local"], "name": "Test"} + | extra_generate_options + ) + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "in_progress", + } + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "failed", + } + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "idle", + } + + response = await client.receive_json() + assert not response["success"] + assert response["error"] == {"code": "unknown_error", "message": "Unknown error"} + + supervisor_client.backups.partial_backup.assert_not_called() + + +@pytest.mark.usefixtures("hassio_client", "setup_integration") async def 
test_reader_writer_restore( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -437,7 +725,7 @@ async def test_reader_writer_restore( ), ], ) -@pytest.mark.usefixtures("hassio_client") +@pytest.mark.usefixtures("hassio_client", "setup_integration") async def test_reader_writer_restore_wrong_parameters( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, From 4adfd52dc0259d1a81c31cdffe9ed50cbef8c026 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 17 Dec 2024 15:08:03 +0100 Subject: [PATCH 358/677] Improve hassio backup agent test coverage (#133426) --- homeassistant/components/hassio/backup.py | 1 + tests/components/hassio/test_backup.py | 249 +++++++++++++++++++++- 2 files changed, 248 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py index 34c0701fdc4..5127c0326cc 100644 --- a/homeassistant/components/hassio/backup.py +++ b/homeassistant/components/hassio/backup.py @@ -173,6 +173,7 @@ class SupervisorBackupAgent(BackupAgent): except SupervisorBadRequestError as err: if err.args[0] != "Backup does not exist": raise + _LOGGER.debug("Backup %s does not exist", backup_id) class SupervisorBackupReaderWriter(BackupReaderWriter): diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index 9995425e6e1..5b3f6ff44a2 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -1,13 +1,20 @@ """Test supervisor backup functionality.""" -from collections.abc import AsyncGenerator, Generator +from collections.abc import ( + AsyncGenerator, + AsyncIterator, + Callable, + Coroutine, + Generator, +) from dataclasses import replace from datetime import datetime from io import StringIO import os from typing import Any -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, Mock, patch +from aiohasupervisor.exceptions import SupervisorBadRequestError from aiohasupervisor.models 
import ( backups as supervisor_backups, mounts as supervisor_mounts, @@ -19,13 +26,17 @@ from homeassistant.components.backup import ( DOMAIN as BACKUP_DOMAIN, AddonInfo, AgentBackup, + BackupAgent, + BackupAgentPlatformProtocol, Folder, ) +from homeassistant.components.hassio.backup import LOCATION_CLOUD_BACKUP from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from .test_init import MOCK_ENVIRON +from tests.common import mock_platform from tests.typing import ClientSessionGenerator, WebSocketGenerator TEST_BACKUP = supervisor_backups.Backup( @@ -189,6 +200,57 @@ async def setup_integration( await hass.async_block_till_done() +class BackupAgentTest(BackupAgent): + """Test backup agent.""" + + domain = "test" + + def __init__(self, name: str) -> None: + """Initialize the backup agent.""" + self.name = name + + async def async_download_backup( + self, backup_id: str, **kwargs: Any + ) -> AsyncIterator[bytes]: + """Download a backup file.""" + return AsyncMock(spec_set=["__aiter__"]) + + async def async_upload_backup( + self, + *, + open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], + backup: AgentBackup, + **kwargs: Any, + ) -> None: + """Upload a backup.""" + await open_stream() + + async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: + """List backups.""" + return [] + + async def async_get_backup( + self, backup_id: str, **kwargs: Any + ) -> AgentBackup | None: + """Return a backup.""" + return None + + async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None: + """Delete a backup file.""" + + +async def _setup_backup_platform( + hass: HomeAssistant, + *, + domain: str, + platform: BackupAgentPlatformProtocol, +) -> None: + """Set up a mock domain.""" + mock_platform(hass, f"{domain}.backup", platform) + assert await async_setup_component(hass, domain, {}) + await hass.async_block_till_done() + + @pytest.mark.usefixtures("hassio_client") 
@pytest.mark.parametrize( ("mounts", "expected_agents"), @@ -405,6 +467,8 @@ async def test_agent_upload( assert resp.status == 201 supervisor_client.backups.reload.assert_not_called() + supervisor_client.backups.download_backup.assert_not_called() + supervisor_client.backups.remove_backup.assert_not_called() @pytest.mark.usefixtures("hassio_client", "setup_integration") @@ -430,6 +494,50 @@ async def test_agent_delete_backup( supervisor_client.backups.remove_backup.assert_called_once_with(backup_id) +@pytest.mark.usefixtures("hassio_client", "setup_integration") +@pytest.mark.parametrize( + ("remove_side_effect", "expected_response"), + [ + ( + SupervisorBadRequestError("blah"), + { + "success": False, + "error": {"code": "unknown_error", "message": "Unknown error"}, + }, + ), + ( + SupervisorBadRequestError("Backup does not exist"), + { + "success": True, + "result": {"agent_errors": {}}, + }, + ), + ], +) +async def test_agent_delete_with_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, + remove_side_effect: Exception, + expected_response: dict[str, Any], +) -> None: + """Test agent delete backup.""" + client = await hass_ws_client(hass) + backup_id = "abc123" + + supervisor_client.backups.remove_backup.side_effect = remove_side_effect + await client.send_json_auto_id( + { + "type": "backup/delete", + "backup_id": backup_id, + } + ) + response = await client.receive_json() + + assert response == {"id": 1, "type": "result"} | expected_response + supervisor_client.backups.remove_backup.assert_called_once_with(backup_id) + + @pytest.mark.usefixtures("hassio_client", "setup_integration") @pytest.mark.parametrize( ("event_data", "mount_info_calls"), @@ -598,6 +706,84 @@ async def test_reader_writer_create( "state": "completed", } + supervisor_client.backups.download_backup.assert_not_called() + supervisor_client.backups.remove_backup.assert_not_called() + + +@pytest.mark.usefixtures("hassio_client", 
"setup_integration") +async def test_reader_writer_create_remote_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test generating a backup which will be uploaded to a remote agent.""" + client = await hass_ws_client(hass) + supervisor_client.backups.partial_backup.return_value.job_id = "abc123" + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + + remote_agent = BackupAgentTest("remote") + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + response = await client.receive_json() + assert response["success"] + + await client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": ["test.remote"], "name": "Test"} + ) + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": None, + "state": "in_progress", + } + + response = await client.receive_json() + assert response["success"] + assert response["result"] == {"backup_job_id": "abc123"} + + supervisor_client.backups.partial_backup.assert_called_once_with( + replace(DEFAULT_BACKUP_OPTIONS, location=LOCATION_CLOUD_BACKUP), + ) + + await client.send_json_auto_id( + { + "type": "supervisor/event", + "data": { + "event": "job", + "data": {"done": True, "uuid": "abc123", "reference": "test_slug"}, + }, + } + ) + response = await client.receive_json() + assert response["success"] + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + "stage": "upload_to_agents", + "state": "in_progress", + } + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "create_backup", + 
"stage": None, + "state": "completed", + } + + supervisor_client.backups.download_backup.assert_called_once_with("test_slug") + supervisor_client.backups.remove_backup.assert_called_once_with("test_slug") + @pytest.mark.usefixtures("hassio_client", "setup_integration") @pytest.mark.parametrize( @@ -653,6 +839,65 @@ async def test_reader_writer_create_wrong_parameters( supervisor_client.backups.partial_backup.assert_not_called() +@pytest.mark.usefixtures("hassio_client", "setup_integration") +async def test_agent_receive_remote_backup( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test receiving a backup which will be uploaded to a remote agent.""" + client = await hass_client() + backup_id = "test-backup" + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS + supervisor_client.backups.upload_backup.return_value = "test_slug" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=False, + size=0.0, + ) + + remote_agent = BackupAgentTest("remote") + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + + supervisor_client.backups.reload.assert_not_called() + with ( + patch("pathlib.Path.mkdir"), + patch("pathlib.Path.open"), + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("shutil.copy"), + ): + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=test.remote", + data={"file": 
StringIO("test")}, + ) + + assert resp.status == 201 + + supervisor_client.backups.download_backup.assert_called_once_with("test_slug") + supervisor_client.backups.remove_backup.assert_called_once_with("test_slug") + + @pytest.mark.usefixtures("hassio_client", "setup_integration") async def test_reader_writer_restore( hass: HomeAssistant, From 9cc5f7ff843cea9d4ac254ea8b17d9a646767ebb Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Tue, 17 Dec 2024 15:41:34 +0100 Subject: [PATCH 359/677] Mark lamarzocco as platinum quality (#131609) --- homeassistant/components/lamarzocco/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index 0d2111a2026..7505843850c 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -36,5 +36,6 @@ "integration_type": "device", "iot_class": "cloud_polling", "loggers": ["pylamarzocco"], + "quality_scale": "platinum", "requirements": ["pylamarzocco==1.4.0"] } From a9f6982ac0814a3733088b7364981bc7f184deec Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Tue, 17 Dec 2024 15:45:16 +0100 Subject: [PATCH 360/677] Mark acaia as platinum quality (#131723) Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/components/acaia/manifest.json | 1 + homeassistant/components/acaia/quality_scale.yaml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/acaia/manifest.json b/homeassistant/components/acaia/manifest.json index c1f1fdd7a81..36551e9c695 100644 --- a/homeassistant/components/acaia/manifest.json +++ b/homeassistant/components/acaia/manifest.json @@ -25,5 +25,6 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["aioacaia"], + "quality_scale": "platinum", "requirements": ["aioacaia==0.1.11"] } diff --git a/homeassistant/components/acaia/quality_scale.yaml 
b/homeassistant/components/acaia/quality_scale.yaml index 9f9f8da8d5d..62573e38799 100644 --- a/homeassistant/components/acaia/quality_scale.yaml +++ b/homeassistant/components/acaia/quality_scale.yaml @@ -16,7 +16,7 @@ rules: No custom actions are defined. docs-high-level-description: done docs-installation-instructions: done - docs-removal-instructions: todo + docs-removal-instructions: done entity-event-setup: status: exempt comment: | From 5b1c5bf9f6aa742493b1b6f6f559fda0e45519b2 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 17 Dec 2024 16:34:48 +0100 Subject: [PATCH 361/677] Record current IQS scale for Tailwind (#133158) Co-authored-by: Joost Lekkerkerker --- .../components/tailwind/quality_scale.yaml | 76 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 76 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/tailwind/quality_scale.yaml diff --git a/homeassistant/components/tailwind/quality_scale.yaml b/homeassistant/components/tailwind/quality_scale.yaml new file mode 100644 index 00000000000..90c5d0d5837 --- /dev/null +++ b/homeassistant/components/tailwind/quality_scale.yaml @@ -0,0 +1,76 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: Integration does not register custom actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: todo + docs-high-level-description: todo + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. 
+ entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: done + test-coverage: done + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: done + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration connects to a single device. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: + status: exempt + comment: | + The coordinator needs translation when the update failed. + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration does not raise any repairable issues. + stale-devices: + status: exempt + comment: | + This integration connects to a single device. 
+ + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 83335fa5c44..23320632a1a 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -1006,7 +1006,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "systemmonitor", "tado", "tailscale", - "tailwind", "tami4", "tank_utility", "tankerkoenig", From a14aca31e534f998bcc7e55976e1b9c7d9c6ffba Mon Sep 17 00:00:00 2001 From: Krisjanis Lejejs Date: Tue, 17 Dec 2024 15:44:50 +0000 Subject: [PATCH 362/677] Add MFA login flow support for cloud component (#132497) * Add MFA login flow support for cloud component * Add tests for cloud MFA login * Update code to reflect used package changes * Update code to use underlying package changes * Remove unused change * Fix login required parameters * Fix parameter validation * Use cv.has_at_least_one_key for param validation --------- Co-authored-by: Martin Hjelmare --- homeassistant/components/cloud/const.py | 2 + homeassistant/components/cloud/http_api.py | 58 ++++++++- tests/components/cloud/test_http_api.py | 129 ++++++++++++++++++++- 3 files changed, 186 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/cloud/const.py b/homeassistant/components/cloud/const.py index 4392bf94827..65d239f2b10 100644 --- a/homeassistant/components/cloud/const.py +++ b/homeassistant/components/cloud/const.py @@ -88,3 +88,5 @@ DISPATCHER_REMOTE_UPDATE: SignalType[Any] = SignalType("cloud_remote_update") STT_ENTITY_UNIQUE_ID = "cloud-speech-to-text" TTS_ENTITY_UNIQUE_ID = "cloud-text-to-speech" + +LOGIN_MFA_TIMEOUT = 60 diff --git a/homeassistant/components/cloud/http_api.py b/homeassistant/components/cloud/http_api.py index 4f2ad0ddcf7..2f49d261792 100644 --- a/homeassistant/components/cloud/http_api.py +++ b/homeassistant/components/cloud/http_api.py @@ -9,6 +9,7 @@ import dataclasses from functools import wraps from http import 
HTTPStatus import logging +import time from typing import Any, Concatenate import aiohttp @@ -31,6 +32,7 @@ from homeassistant.components.http.data_validator import RequestDataValidator from homeassistant.const import CLOUD_NEVER_EXPOSED_ENTITIES from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.util.location import async_detect_location_info @@ -39,6 +41,7 @@ from .assist_pipeline import async_create_cloud_pipeline from .client import CloudClient from .const import ( DATA_CLOUD, + LOGIN_MFA_TIMEOUT, PREF_ALEXA_REPORT_STATE, PREF_DISABLE_2FA, PREF_ENABLE_ALEXA, @@ -69,6 +72,10 @@ _CLOUD_ERRORS: dict[type[Exception], tuple[HTTPStatus, str]] = { } +class MFAExpiredOrNotStarted(auth.CloudError): + """Multi-factor authentication expired, or not started.""" + + @callback def async_setup(hass: HomeAssistant) -> None: """Initialize the HTTP API.""" @@ -101,6 +108,11 @@ def async_setup(hass: HomeAssistant) -> None: _CLOUD_ERRORS.update( { + auth.InvalidTotpCode: (HTTPStatus.BAD_REQUEST, "Invalid TOTP code."), + auth.MFARequired: ( + HTTPStatus.UNAUTHORIZED, + "Multi-factor authentication required.", + ), auth.UserNotFound: (HTTPStatus.BAD_REQUEST, "User does not exist."), auth.UserNotConfirmed: (HTTPStatus.BAD_REQUEST, "Email not confirmed."), auth.UserExists: ( @@ -112,6 +124,10 @@ def async_setup(hass: HomeAssistant) -> None: HTTPStatus.BAD_REQUEST, "Password change required.", ), + MFAExpiredOrNotStarted: ( + HTTPStatus.BAD_REQUEST, + "Multi-factor authentication expired, or not started. 
Please try again.", + ), } ) @@ -206,19 +222,57 @@ class GoogleActionsSyncView(HomeAssistantView): class CloudLoginView(HomeAssistantView): """Login to Home Assistant cloud.""" + _mfa_tokens: dict[str, str] = {} + _mfa_tokens_set_time: float = 0 + url = "/api/cloud/login" name = "api:cloud:login" @require_admin @_handle_cloud_errors @RequestDataValidator( - vol.Schema({vol.Required("email"): str, vol.Required("password"): str}) + vol.Schema( + vol.All( + { + vol.Required("email"): str, + vol.Exclusive("password", "login"): str, + vol.Exclusive("code", "login"): str, + }, + cv.has_at_least_one_key("password", "code"), + ) + ) ) async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response: """Handle login request.""" hass = request.app[KEY_HASS] cloud = hass.data[DATA_CLOUD] - await cloud.login(data["email"], data["password"]) + + try: + email = data["email"] + password = data.get("password") + code = data.get("code") + + if email and password: + await cloud.login(email, password) + + else: + if ( + not self._mfa_tokens + or time.time() - self._mfa_tokens_set_time > LOGIN_MFA_TIMEOUT + ): + raise MFAExpiredOrNotStarted + + # Voluptuous should ensure that code is not None because password is + assert code is not None + + await cloud.login_verify_totp(email, code, self._mfa_tokens) + self._mfa_tokens = {} + self._mfa_tokens_set_time = 0 + + except auth.MFARequired as mfa_err: + self._mfa_tokens = mfa_err.mfa_tokens + self._mfa_tokens_set_time = time.time() + raise if "assist_pipeline" in hass.config.components: new_cloud_pipeline_id = await async_create_cloud_pipeline(hass) diff --git a/tests/components/cloud/test_http_api.py b/tests/components/cloud/test_http_api.py index 216fc77db48..b35cc03ac73 100644 --- a/tests/components/cloud/test_http_api.py +++ b/tests/components/cloud/test_http_api.py @@ -8,7 +8,12 @@ from unittest.mock import AsyncMock, MagicMock, Mock, patch import aiohttp from hass_nabucasa import thingtalk -from hass_nabucasa.auth 
import Unauthenticated, UnknownError +from hass_nabucasa.auth import ( + InvalidTotpCode, + MFARequired, + Unauthenticated, + UnknownError, +) from hass_nabucasa.const import STATE_CONNECTED from hass_nabucasa.voice import TTS_VOICES import pytest @@ -378,6 +383,128 @@ async def test_login_view_invalid_credentials( assert req.status == HTTPStatus.UNAUTHORIZED +async def test_login_view_mfa_required( + cloud: MagicMock, + setup_cloud: None, + hass_client: ClientSessionGenerator, +) -> None: + """Test logging in when MFA is required.""" + cloud_client = await hass_client() + cloud.login.side_effect = MFARequired(mfa_tokens={"session": "tokens"}) + + req = await cloud_client.post( + "/api/cloud/login", json={"email": "my_username", "password": "my_password"} + ) + + assert req.status == HTTPStatus.UNAUTHORIZED + res = await req.json() + assert res["code"] == "mfarequired" + + +async def test_login_view_mfa_required_tokens_missing( + cloud: MagicMock, + setup_cloud: None, + hass_client: ClientSessionGenerator, +) -> None: + """Test logging in when MFA is required, code is provided, but session tokens are missing.""" + cloud_client = await hass_client() + cloud.login.side_effect = MFARequired(mfa_tokens={}) + + # Login with password and get MFA required error + req = await cloud_client.post( + "/api/cloud/login", json={"email": "my_username", "password": "my_password"} + ) + + assert req.status == HTTPStatus.UNAUTHORIZED + res = await req.json() + assert res["code"] == "mfarequired" + + # Login with TOTP code and get MFA expired error + req = await cloud_client.post( + "/api/cloud/login", + json={"email": "my_username", "code": "123346"}, + ) + + assert req.status == HTTPStatus.BAD_REQUEST + res = await req.json() + assert res["code"] == "mfaexpiredornotstarted" + + +async def test_login_view_mfa_password_and_totp_provided( + cloud: MagicMock, + setup_cloud: None, + hass_client: ClientSessionGenerator, +) -> None: + """Test logging in when password and TOTP code 
provided at once.""" + cloud_client = await hass_client() + + req = await cloud_client.post( + "/api/cloud/login", + json={"email": "my_username", "password": "my_password", "code": "123346"}, + ) + + assert req.status == HTTPStatus.BAD_REQUEST + + +async def test_login_view_invalid_totp_code( + cloud: MagicMock, + setup_cloud: None, + hass_client: ClientSessionGenerator, +) -> None: + """Test logging in when MFA is required and invalid code is provided.""" + cloud_client = await hass_client() + cloud.login.side_effect = MFARequired(mfa_tokens={"session": "tokens"}) + cloud.login_verify_totp.side_effect = InvalidTotpCode + + # Login with password and get MFA required error + req = await cloud_client.post( + "/api/cloud/login", json={"email": "my_username", "password": "my_password"} + ) + + assert req.status == HTTPStatus.UNAUTHORIZED + res = await req.json() + assert res["code"] == "mfarequired" + + # Login with TOTP code and get invalid TOTP code error + req = await cloud_client.post( + "/api/cloud/login", + json={"email": "my_username", "code": "123346"}, + ) + + assert req.status == HTTPStatus.BAD_REQUEST + res = await req.json() + assert res["code"] == "invalidtotpcode" + + +async def test_login_view_valid_totp_provided( + cloud: MagicMock, + setup_cloud: None, + hass_client: ClientSessionGenerator, +) -> None: + """Test logging in with valid TOTP code.""" + cloud_client = await hass_client() + cloud.login.side_effect = MFARequired(mfa_tokens={"session": "tokens"}) + + # Login with password and get MFA required error + req = await cloud_client.post( + "/api/cloud/login", json={"email": "my_username", "password": "my_password"} + ) + + assert req.status == HTTPStatus.UNAUTHORIZED + res = await req.json() + assert res["code"] == "mfarequired" + + # Login with TOTP code and get success response + req = await cloud_client.post( + "/api/cloud/login", + json={"email": "my_username", "code": "123346"}, + ) + + assert req.status == HTTPStatus.OK + result = await 
req.json() + assert result == {"success": True, "cloud_pipeline": None} + + async def test_login_view_unknown_error( cloud: MagicMock, setup_cloud: None, From d9fb5a758232f3da6c0a86a6eb3fa684adabc22d Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Tue, 17 Dec 2024 17:10:04 +0100 Subject: [PATCH 363/677] Record current IQS state for SABnzbd (#131656) * Record current IQS state for SAbnzbd * Convert review comments to IQS comments --- .../components/sabnzbd/quality_scale.yaml | 96 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 96 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/sabnzbd/quality_scale.yaml diff --git a/homeassistant/components/sabnzbd/quality_scale.yaml b/homeassistant/components/sabnzbd/quality_scale.yaml new file mode 100644 index 00000000000..c3fea2427ce --- /dev/null +++ b/homeassistant/components/sabnzbd/quality_scale.yaml @@ -0,0 +1,96 @@ +rules: + # Bronze + action-setup: + status: todo + comment: | + Do not remove services when all config entries are removed. + appropriate-polling: done + brands: done + common-modules: + status: todo + comment: | + const.py has unused variables. + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + The integration has deprecated the actions, thus the documentation has been removed. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: todo + comment: | + Raise ServiceValidationError in async_get_entry_for_service_call. 
+ config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + The integration does not provide any additional options. + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: todo + test-coverage: + status: todo + comment: | + Coverage for loading and unloading config entries is missing. + + # Gold + devices: done + diagnostics: todo + discovery-update-info: + status: exempt + comment: | + This integration cannot be discovered. + discovery: + status: exempt + comment: | + This integration cannot be discovered. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: + status: todo + comment: | + Describe the state of the sensor and make it a enum sensor. + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + The integration connects to a single service per configuration entry. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: done + repair-issues: done + stale-devices: + status: exempt + comment: | + This integration connect to a single service per configuration entry. 
+ + # Platinum + async-dependency: done + inject-websession: done + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 23320632a1a..88e450409b4 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -887,7 +887,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "ruuvi_gateway", "ruuvitag_ble", "rympro", - "sabnzbd", "saj", "samsungtv", "sanix", From 44a86f537ff7f5d1f48bfc518a4c6d89de4c3ff4 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Tue, 17 Dec 2024 17:12:11 +0100 Subject: [PATCH 364/677] Add quality scale for Fronius (#131770) --- .../components/fronius/quality_scale.yaml | 89 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 89 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/fronius/quality_scale.yaml diff --git a/homeassistant/components/fronius/quality_scale.yaml b/homeassistant/components/fronius/quality_scale.yaml new file mode 100644 index 00000000000..2c4b892475b --- /dev/null +++ b/homeassistant/components/fronius/quality_scale.yaml @@ -0,0 +1,89 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: done + brands: done + common-modules: + status: done + comment: | + Single platform only, so no entity.py file. + CoordinatorEntity is used. + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + This integration does not subscribe to events. 
+ entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + # Silver + action-exceptions: + status: exempt + comment: | + This integration does not provide additional actions. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + This integration does not provide configuration options. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: + status: done + comment: | + Coordinators are used and asyncio.Lock mutex across them ensure proper + rate limiting. Platforms are read-only. + reauthentication-flow: + status: exempt + comment: | + This integration doesn't require authentication. + test-coverage: done + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done + dynamic-devices: done + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + repair-issues: + status: exempt + comment: | + This integration doesn't have any known user-repairable issues. + stale-devices: done + # Platinum + async-dependency: done + inject-websession: done + strict-typing: + status: todo + comment: | + The pyfronius library isn't strictly typed and doesn't export type information. 
diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 88e450409b4..4e5cee2d16d 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -416,7 +416,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "freedompro", "fritzbox", "fritzbox_callmonitor", - "fronius", "frontier_silicon", "fujitsu_fglair", "fujitsu_hvac", From 25a63863cb1f1bdeb042d2883754ea89be2d692e Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 17 Dec 2024 17:21:13 +0100 Subject: [PATCH 365/677] Adapt hassio backup agent to supervisor changes (#133428) --- homeassistant/components/hassio/backup.py | 45 ++++++++++++++++++++--- tests/components/hassio/test_backup.py | 37 ++++++++++++++++--- 2 files changed, 70 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py index 5127c0326cc..4bc6dff44d2 100644 --- a/homeassistant/components/hassio/backup.py +++ b/homeassistant/components/hassio/backup.py @@ -8,7 +8,10 @@ import logging from pathlib import Path from typing import Any, cast -from aiohasupervisor.exceptions import SupervisorBadRequestError +from aiohasupervisor.exceptions import ( + SupervisorBadRequestError, + SupervisorNotFoundError, +) from aiohasupervisor.models import ( backups as supervisor_backups, mounts as supervisor_mounts, @@ -130,7 +133,10 @@ class SupervisorBackupAgent(BackupAgent): **kwargs: Any, ) -> AsyncIterator[bytes]: """Download a backup file.""" - return await self._client.backups.download_backup(backup_id) + return await self._client.backups.download_backup( + backup_id, + options=supervisor_backups.DownloadBackupOptions(location=self.location), + ) async def async_upload_backup( self, @@ -169,11 +175,18 @@ class SupervisorBackupAgent(BackupAgent): async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None: """Remove a backup.""" try: - await self._client.backups.remove_backup(backup_id) + await 
self._client.backups.remove_backup( + backup_id, + options=supervisor_backups.RemoveBackupOptions( + location={self.location} + ), + ) except SupervisorBadRequestError as err: if err.args[0] != "Backup does not exist": raise _LOGGER.debug("Backup %s does not exist", backup_id) + except SupervisorNotFoundError: + _LOGGER.debug("Backup %s does not exist", backup_id) class SupervisorBackupReaderWriter(BackupReaderWriter): @@ -200,7 +213,11 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): """Create a backup.""" manager = self._hass.data[DATA_MANAGER] - include_addons_set = set(include_addons) if include_addons else None + include_addons_set: supervisor_backups.AddonSet | set[str] | None = None + if include_all_addons: + include_addons_set = supervisor_backups.AddonSet.ALL + elif include_addons: + include_addons_set = set(include_addons) include_folders_set = ( {supervisor_backups.Folder(folder) for folder in include_folders} if include_folders @@ -266,7 +283,12 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): async def remove_backup() -> None: if not remove_after_upload: return - await self._client.backups.remove_backup(backup_id) + await self._client.backups.remove_backup( + backup_id, + options=supervisor_backups.RemoveBackupOptions( + location={LOCATION_CLOUD_BACKUP} + ), + ) details = await self._client.backups.backup_info(backup_id) @@ -306,7 +328,12 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): async def remove_backup() -> None: if locations: return - await self._client.backups.remove_backup(backup_id) + await self._client.backups.remove_backup( + backup_id, + options=supervisor_backups.RemoveBackupOptions( + location={LOCATION_CLOUD_BACKUP} + ), + ) details = await self._client.backups.backup_info(backup_id) @@ -341,6 +368,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): ) manager = self._hass.data[DATA_MANAGER] + restore_location: str | None if manager.backup_agents[agent_id].domain != DOMAIN: # Download the backup 
to the supervisor. Supervisor will clean up the backup # two days after the restore is done. @@ -349,6 +377,10 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): stream=await open_stream(), suggested_filename=f"{backup_id}.tar", ) + restore_location = LOCATION_CLOUD_BACKUP + else: + agent = cast(SupervisorBackupAgent, manager.backup_agents[agent_id]) + restore_location = agent.location job = await self._client.backups.partial_restore( backup_id, @@ -358,6 +390,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): homeassistant=restore_homeassistant, password=password, background=True, + location=restore_location, ), ) diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index 5b3f6ff44a2..75cc049f7b5 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -14,7 +14,10 @@ import os from typing import Any from unittest.mock import AsyncMock, Mock, patch -from aiohasupervisor.exceptions import SupervisorBadRequestError +from aiohasupervisor.exceptions import ( + SupervisorBadRequestError, + SupervisorNotFoundError, +) from aiohasupervisor.models import ( backups as supervisor_backups, mounts as supervisor_mounts, @@ -403,6 +406,10 @@ async def test_agent_download( assert resp.status == 200 assert await resp.content.read() == b"backup data" + supervisor_client.backups.download_backup.assert_called_once_with( + "abc123", options=supervisor_backups.DownloadBackupOptions(location=None) + ) + @pytest.mark.usefixtures("hassio_client", "setup_integration") async def test_agent_download_unavailable_backup( @@ -491,7 +498,9 @@ async def test_agent_delete_backup( assert response["success"] assert response["result"] == {"agent_errors": {}} - supervisor_client.backups.remove_backup.assert_called_once_with(backup_id) + supervisor_client.backups.remove_backup.assert_called_once_with( + backup_id, options=supervisor_backups.RemoveBackupOptions(location={None}) + ) 
@pytest.mark.usefixtures("hassio_client", "setup_integration") @@ -512,6 +521,13 @@ async def test_agent_delete_backup( "result": {"agent_errors": {}}, }, ), + ( + SupervisorNotFoundError(), + { + "success": True, + "result": {"agent_errors": {}}, + }, + ), ], ) async def test_agent_delete_with_error( @@ -535,7 +551,9 @@ async def test_agent_delete_with_error( response = await client.receive_json() assert response == {"id": 1, "type": "result"} | expected_response - supervisor_client.backups.remove_backup.assert_called_once_with(backup_id) + supervisor_client.backups.remove_backup.assert_called_once_with( + backup_id, options=supervisor_backups.RemoveBackupOptions(location={None}) + ) @pytest.mark.usefixtures("hassio_client", "setup_integration") @@ -627,7 +645,7 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( ), ( {"include_all_addons": True}, - DEFAULT_BACKUP_OPTIONS, + replace(DEFAULT_BACKUP_OPTIONS, addons="all"), ), ( {"include_database": False}, @@ -782,7 +800,10 @@ async def test_reader_writer_create_remote_backup( } supervisor_client.backups.download_backup.assert_called_once_with("test_slug") - supervisor_client.backups.remove_backup.assert_called_once_with("test_slug") + supervisor_client.backups.remove_backup.assert_called_once_with( + "test_slug", + options=supervisor_backups.RemoveBackupOptions({LOCATION_CLOUD_BACKUP}), + ) @pytest.mark.usefixtures("hassio_client", "setup_integration") @@ -895,7 +916,10 @@ async def test_agent_receive_remote_backup( assert resp.status == 201 supervisor_client.backups.download_backup.assert_called_once_with("test_slug") - supervisor_client.backups.remove_backup.assert_called_once_with("test_slug") + supervisor_client.backups.remove_backup.assert_called_once_with( + "test_slug", + options=supervisor_backups.RemoveBackupOptions({LOCATION_CLOUD_BACKUP}), + ) @pytest.mark.usefixtures("hassio_client", "setup_integration") @@ -933,6 +957,7 @@ async def test_reader_writer_restore( background=True, 
folders=None, homeassistant=True, + location=None, password=None, ), ) From 3341e3d95b41d37a635a5b3f13b19d158e4d3a05 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Tue, 17 Dec 2024 17:43:56 +0100 Subject: [PATCH 366/677] Fix two occurrences of "HomeAssistant" adding the missing space (#133435) --- homeassistant/components/roon/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/roon/strings.json b/homeassistant/components/roon/strings.json index 85cb53b9010..463f0431891 100644 --- a/homeassistant/components/roon/strings.json +++ b/homeassistant/components/roon/strings.json @@ -10,8 +10,8 @@ } }, "link": { - "title": "Authorize HomeAssistant in Roon", - "description": "You must authorize Home Assistant in Roon. After you select **Submit**, go to the Roon Core application, open **Settings** and enable HomeAssistant on the **Extensions** tab." + "title": "Authorize Home Assistant in Roon", + "description": "You must authorize Home Assistant in Roon. After you select **Submit**, go to the Roon Core application, open **Settings** and enable Home Assistant on the **Extensions** tab." } }, "error": { From 89eda9e068870c3b33ad6d9368090bac5a0bd511 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 17 Dec 2024 17:47:17 +0100 Subject: [PATCH 367/677] Don't raise when removing non-existing cloud backup (#133429) --- homeassistant/components/cloud/backup.py | 2 +- tests/components/cloud/test_backup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/cloud/backup.py b/homeassistant/components/cloud/backup.py index d394daa7dc5..e826c229321 100644 --- a/homeassistant/components/cloud/backup.py +++ b/homeassistant/components/cloud/backup.py @@ -167,7 +167,7 @@ class CloudBackupAgent(BackupAgent): :param backup_id: The ID of the backup that was returned in async_list_backups. 
""" if not await self.async_get_backup(backup_id): - raise BackupAgentError("Backup not found") + return try: await async_files_delete_file( diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py index ac0ef1826de..5e607bbc70b 100644 --- a/tests/components/cloud/test_backup.py +++ b/tests/components/cloud/test_backup.py @@ -570,4 +570,4 @@ async def test_agents_delete_not_found( response = await client.receive_json() assert response["success"] - assert response["result"] == {"agent_errors": {"cloud.cloud": "Backup not found"}} + assert response["result"] == {"agent_errors": {}} From 1de8d63a63c2ca973ca54339402ab1f5bb0a0986 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Tue, 17 Dec 2024 17:48:18 +0100 Subject: [PATCH 368/677] Remove three duplicated space characters in strings.json (#133436) --- homeassistant/components/smartthings/strings.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/smartthings/strings.json b/homeassistant/components/smartthings/strings.json index 7fbf966fa89..de94e5adfcd 100644 --- a/homeassistant/components/smartthings/strings.json +++ b/homeassistant/components/smartthings/strings.json @@ -7,14 +7,14 @@ }, "pat": { "title": "Enter Personal Access Token", - "description": "Please enter a SmartThings [Personal Access Token]({token_url}) that has been created per the [instructions]({component_url}). This will be used to create the Home Assistant integration within your SmartThings account.", + "description": "Please enter a SmartThings [Personal Access Token]({token_url}) that has been created per the [instructions]({component_url}). This will be used to create the Home Assistant integration within your SmartThings account.", "data": { "access_token": "[%key:common::config_flow::data::access_token%]" } }, "select_location": { "title": "Select Location", - "description": "Please select the SmartThings Location you wish to add to Home Assistant. 
We will then open a new window and ask you to login and authorize installation of the Home Assistant integration into the selected location.", + "description": "Please select the SmartThings Location you wish to add to Home Assistant. We will then open a new window and ask you to login and authorize installation of the Home Assistant integration into the selected location.", "data": { "location_id": "[%key:common::config_flow::data::location%]" } }, "authorize": { "title": "Authorize Home Assistant" } @@ -27,7 +27,7 @@ "token_invalid_format": "The token must be in the UID/GUID format", "token_unauthorized": "The token is invalid or no longer authorized.", "token_forbidden": "The token does not have the required OAuth scopes.", - "app_setup_error": "Unable to set up the SmartApp. Please try again.", + "app_setup_error": "Unable to set up the SmartApp. Please try again.", "webhook_error": "SmartThings could not validate the webhook URL. Please ensure the webhook URL is reachable from the internet and try again." 
} } From da85c497bf76f60cfaa44ab26dded8fe007107b0 Mon Sep 17 00:00:00 2001 From: DrBlokmeister <57352628+DrBlokmeister@users.noreply.github.com> Date: Tue, 17 Dec 2024 17:48:54 +0100 Subject: [PATCH 369/677] Add transmission download path to events + add_torrent service (#121371) Co-authored-by: Erik Montnemery Co-authored-by: Joost Lekkerkerker --- .../components/transmission/__init__.py | 18 ++++++++++++++-- .../components/transmission/const.py | 1 + .../components/transmission/coordinator.py | 21 ++++++++++++++++--- .../components/transmission/services.yaml | 5 +++++ .../components/transmission/strings.json | 4 ++++ 5 files changed, 44 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/transmission/__init__.py b/homeassistant/components/transmission/__init__.py index 1c108831acf..1a8ffdea0c2 100644 --- a/homeassistant/components/transmission/__init__.py +++ b/homeassistant/components/transmission/__init__.py @@ -42,6 +42,7 @@ from homeassistant.helpers.typing import ConfigType from .const import ( ATTR_DELETE_DATA, + ATTR_DOWNLOAD_PATH, ATTR_TORRENT, CONF_ENTRY_ID, DEFAULT_DELETE_DATA, @@ -82,7 +83,12 @@ SERVICE_BASE_SCHEMA = vol.Schema( ) SERVICE_ADD_TORRENT_SCHEMA = vol.All( - SERVICE_BASE_SCHEMA.extend({vol.Required(ATTR_TORRENT): cv.string}), + SERVICE_BASE_SCHEMA.extend( + { + vol.Required(ATTR_TORRENT): cv.string, + vol.Optional(ATTR_DOWNLOAD_PATH, default=None): cv.string, + } + ), ) @@ -213,10 +219,18 @@ def setup_hass_services(hass: HomeAssistant) -> None: entry_id: str = service.data[CONF_ENTRY_ID] coordinator = _get_coordinator_from_service_data(hass, entry_id) torrent: str = service.data[ATTR_TORRENT] + download_path: str | None = service.data.get(ATTR_DOWNLOAD_PATH) if torrent.startswith( ("http", "ftp:", "magnet:") ) or hass.config.is_allowed_path(torrent): - await hass.async_add_executor_job(coordinator.api.add_torrent, torrent) + if download_path: + await hass.async_add_executor_job( + partial( + coordinator.api.add_torrent, 
torrent, download_dir=download_path + ) + ) + else: + await hass.async_add_executor_job(coordinator.api.add_torrent, torrent) await coordinator.async_request_refresh() else: _LOGGER.warning("Could not add torrent: unsupported type or no permission") diff --git a/homeassistant/components/transmission/const.py b/homeassistant/components/transmission/const.py index 120918b24a2..c232f26cefd 100644 --- a/homeassistant/components/transmission/const.py +++ b/homeassistant/components/transmission/const.py @@ -40,6 +40,7 @@ STATE_ATTR_TORRENT_INFO = "torrent_info" ATTR_DELETE_DATA = "delete_data" ATTR_TORRENT = "torrent" +ATTR_DOWNLOAD_PATH = "download_path" SERVICE_ADD_TORRENT = "add_torrent" SERVICE_REMOVE_TORRENT = "remove_torrent" diff --git a/homeassistant/components/transmission/coordinator.py b/homeassistant/components/transmission/coordinator.py index e0930bd9e9e..b998ab6fbdd 100644 --- a/homeassistant/components/transmission/coordinator.py +++ b/homeassistant/components/transmission/coordinator.py @@ -102,7 +102,12 @@ class TransmissionDataUpdateCoordinator(DataUpdateCoordinator[SessionStats]): for torrent in current_completed_torrents: if torrent.id not in old_completed_torrents: self.hass.bus.fire( - EVENT_DOWNLOADED_TORRENT, {"name": torrent.name, "id": torrent.id} + EVENT_DOWNLOADED_TORRENT, + { + "name": torrent.name, + "id": torrent.id, + "download_path": torrent.download_dir, + }, ) self._completed_torrents = current_completed_torrents @@ -118,7 +123,12 @@ class TransmissionDataUpdateCoordinator(DataUpdateCoordinator[SessionStats]): for torrent in current_started_torrents: if torrent.id not in old_started_torrents: self.hass.bus.fire( - EVENT_STARTED_TORRENT, {"name": torrent.name, "id": torrent.id} + EVENT_STARTED_TORRENT, + { + "name": torrent.name, + "id": torrent.id, + "download_path": torrent.download_dir, + }, ) self._started_torrents = current_started_torrents @@ -130,7 +140,12 @@ class 
TransmissionDataUpdateCoordinator(DataUpdateCoordinator[SessionStats]): for torrent in self._all_torrents: if torrent.id not in current_torrents: self.hass.bus.fire( - EVENT_REMOVED_TORRENT, {"name": torrent.name, "id": torrent.id} + EVENT_REMOVED_TORRENT, + { + "name": torrent.name, + "id": torrent.id, + "download_path": torrent.download_dir, + }, ) self._all_torrents = self.torrents.copy() diff --git a/homeassistant/components/transmission/services.yaml b/homeassistant/components/transmission/services.yaml index 2d61bda442f..8f9aadd5009 100644 --- a/homeassistant/components/transmission/services.yaml +++ b/homeassistant/components/transmission/services.yaml @@ -9,6 +9,11 @@ add_torrent: example: http://releases.ubuntu.com/19.04/ubuntu-19.04-desktop-amd64.iso.torrent selector: text: + download_path: + required: false + example: "/path/to/download/directory" + selector: + text: remove_torrent: fields: diff --git a/homeassistant/components/transmission/strings.json b/homeassistant/components/transmission/strings.json index 578bc262589..aabc5827a88 100644 --- a/homeassistant/components/transmission/strings.json +++ b/homeassistant/components/transmission/strings.json @@ -101,6 +101,10 @@ "torrent": { "name": "Torrent", "description": "URL, magnet link or Base64 encoded file." + }, + "download_path": { + "name": "Download path", + "description": "Optional path to specify where the torrent should be downloaded. If not specified, the default download directory is used." 
} } }, From 98d50206900695d5108852b0c3c2340dff5ddb90 Mon Sep 17 00:00:00 2001 From: Kevin Stillhammer Date: Tue, 17 Dec 2024 18:00:23 +0100 Subject: [PATCH 370/677] Support units and filters in async_get_travel_times_service for waze_travel_time (#130776) --- .../components/waze_travel_time/__init__.py | 37 ++++++++++++++++++- .../components/waze_travel_time/sensor.py | 15 +------- .../components/waze_travel_time/services.yaml | 10 +++++ .../components/waze_travel_time/strings.json | 8 ++++ .../components/waze_travel_time/test_init.py | 10 ++--- 5 files changed, 59 insertions(+), 21 deletions(-) diff --git a/homeassistant/components/waze_travel_time/__init__.py b/homeassistant/components/waze_travel_time/__init__.py index 1abcf9d391d..34f22c9218f 100644 --- a/homeassistant/components/waze_travel_time/__init__.py +++ b/homeassistant/components/waze_travel_time/__init__.py @@ -3,12 +3,13 @@ import asyncio from collections.abc import Collection import logging +from typing import Literal from pywaze.route_calculator import CalcRoutesResponse, WazeRouteCalculator, WRCError import voluptuous as vol from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_REGION, Platform +from homeassistant.const import CONF_REGION, Platform, UnitOfLength from homeassistant.core import ( HomeAssistant, ServiceCall, @@ -22,7 +23,10 @@ from homeassistant.helpers.selector import ( SelectSelectorConfig, SelectSelectorMode, TextSelector, + TextSelectorConfig, + TextSelectorType, ) +from homeassistant.util.unit_conversion import DistanceConverter from .const import ( CONF_AVOID_FERRIES, @@ -38,6 +42,7 @@ from .const import ( DEFAULT_FILTER, DEFAULT_VEHICLE_TYPE, DOMAIN, + IMPERIAL_UNITS, METRIC_UNITS, REGIONS, SEMAPHORE, @@ -80,6 +85,18 @@ SERVICE_GET_TRAVEL_TIMES_SCHEMA = vol.Schema( vol.Optional(CONF_AVOID_TOLL_ROADS, default=False): BooleanSelector(), vol.Optional(CONF_AVOID_SUBSCRIPTION_ROADS, default=False): BooleanSelector(), 
vol.Optional(CONF_AVOID_FERRIES, default=False): BooleanSelector(), + vol.Optional(CONF_INCL_FILTER): TextSelector( + TextSelectorConfig( + type=TextSelectorType.TEXT, + multiple=True, + ), + ), + vol.Optional(CONF_EXCL_FILTER): TextSelector( + TextSelectorConfig( + type=TextSelectorType.TEXT, + multiple=True, + ), + ), } ) @@ -107,6 +124,9 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b avoid_subscription_roads=service.data[CONF_AVOID_SUBSCRIPTION_ROADS], avoid_ferries=service.data[CONF_AVOID_FERRIES], realtime=service.data[CONF_REALTIME], + units=service.data[CONF_UNITS], + incl_filters=service.data.get(CONF_INCL_FILTER, DEFAULT_FILTER), + excl_filters=service.data.get(CONF_EXCL_FILTER, DEFAULT_FILTER), ) return {"routes": [vars(route) for route in response]} if response else None @@ -129,6 +149,7 @@ async def async_get_travel_times( avoid_subscription_roads: bool, avoid_ferries: bool, realtime: bool, + units: Literal["metric", "imperial"] = "metric", incl_filters: Collection[str] | None = None, excl_filters: Collection[str] | None = None, ) -> list[CalcRoutesResponse] | None: @@ -194,6 +215,20 @@ async def async_get_travel_times( route for route in incl_routes if not should_exclude_route(route) ] + if units == IMPERIAL_UNITS: + filtered_routes = [ + CalcRoutesResponse( + name=route.name, + distance=DistanceConverter.convert( + route.distance, UnitOfLength.KILOMETERS, UnitOfLength.MILES + ), + duration=route.duration, + street_names=route.street_names, + ) + for route in filtered_routes + if route.distance is not None + ] + if len(filtered_routes) < 1: _LOGGER.warning("No routes found") return None diff --git a/homeassistant/components/waze_travel_time/sensor.py b/homeassistant/components/waze_travel_time/sensor.py index c2d3ee12cf8..a216a02f61e 100644 --- a/homeassistant/components/waze_travel_time/sensor.py +++ b/homeassistant/components/waze_travel_time/sensor.py @@ -20,7 +20,6 @@ from homeassistant.const import ( CONF_NAME, 
CONF_REGION, EVENT_HOMEASSISTANT_STARTED, - UnitOfLength, UnitOfTime, ) from homeassistant.core import CoreState, HomeAssistant @@ -28,7 +27,6 @@ from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.httpx_client import get_async_client from homeassistant.helpers.location import find_coordinates -from homeassistant.util.unit_conversion import DistanceConverter from . import async_get_travel_times from .const import ( @@ -44,7 +42,6 @@ from .const import ( CONF_VEHICLE_TYPE, DEFAULT_NAME, DOMAIN, - IMPERIAL_UNITS, SEMAPHORE, ) @@ -201,6 +198,7 @@ class WazeTravelTimeData: avoid_subscription_roads, avoid_ferries, realtime, + self.config_entry.options[CONF_UNITS], incl_filter, excl_filter, ) @@ -211,14 +209,5 @@ class WazeTravelTimeData: return self.duration = route.duration - distance = route.distance - - if self.config_entry.options[CONF_UNITS] == IMPERIAL_UNITS: - # Convert to miles. 
- self.distance = DistanceConverter.convert( - distance, UnitOfLength.KILOMETERS, UnitOfLength.MILES - ) - else: - self.distance = distance - + self.distance = route.distance self.route = route.name diff --git a/homeassistant/components/waze_travel_time/services.yaml b/homeassistant/components/waze_travel_time/services.yaml index 7fba565dd47..fd5f2e9adea 100644 --- a/homeassistant/components/waze_travel_time/services.yaml +++ b/homeassistant/components/waze_travel_time/services.yaml @@ -55,3 +55,13 @@ get_travel_times: required: false selector: boolean: + incl_filter: + required: false + selector: + text: + multiple: true + excl_filter: + required: false + selector: + text: + multiple: true diff --git a/homeassistant/components/waze_travel_time/strings.json b/homeassistant/components/waze_travel_time/strings.json index f053f033307..cca1789bf7e 100644 --- a/homeassistant/components/waze_travel_time/strings.json +++ b/homeassistant/components/waze_travel_time/strings.json @@ -101,6 +101,14 @@ "avoid_subscription_roads": { "name": "[%key:component::waze_travel_time::options::step::init::data::avoid_subscription_roads%]", "description": "Whether to avoid subscription roads." + }, + "incl_filter": { + "name": "[%key:component::waze_travel_time::options::step::init::data::incl_filter%]", + "description": "Exact streetname which must be part of the selected route." + }, + "excl_filter": { + "name": "[%key:component::waze_travel_time::options::step::init::data::excl_filter%]", + "description": "Exact streetname which must NOT be part of the selected route." 
} } } diff --git a/tests/components/waze_travel_time/test_init.py b/tests/components/waze_travel_time/test_init.py index 9c59278ff99..89bccc00985 100644 --- a/tests/components/waze_travel_time/test_init.py +++ b/tests/components/waze_travel_time/test_init.py @@ -44,6 +44,8 @@ async def test_service_get_travel_times(hass: HomeAssistant) -> None: "destination": "location2", "vehicle_type": "car", "region": "us", + "units": "imperial", + "incl_filter": ["IncludeThis"], }, blocking=True, return_response=True, @@ -51,17 +53,11 @@ async def test_service_get_travel_times(hass: HomeAssistant) -> None: assert response_data == { "routes": [ { - "distance": 300, + "distance": pytest.approx(186.4113), "duration": 150, "name": "E1337 - Teststreet", "street_names": ["E1337", "IncludeThis", "Teststreet"], }, - { - "distance": 500, - "duration": 600, - "name": "E0815 - Otherstreet", - "street_names": ["E0815", "ExcludeThis", "Otherstreet"], - }, ] } From b5f6734197a83d93e00bec080edcf42126961370 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 17 Dec 2024 19:23:54 +0100 Subject: [PATCH 371/677] Simplify modern_forms config flow (part 2) (#130494) --- .../components/modern_forms/config_flow.py | 67 +++++++++---------- 1 file changed, 30 insertions(+), 37 deletions(-) diff --git a/homeassistant/components/modern_forms/config_flow.py b/homeassistant/components/modern_forms/config_flow.py index 6799dbf97d3..3c217b5747f 100644 --- a/homeassistant/components/modern_forms/config_flow.py +++ b/homeassistant/components/modern_forms/config_flow.py @@ -22,7 +22,7 @@ class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - host: str | None = None + host: str mac: str | None = None name: str @@ -30,7 +30,13 @@ class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle setup by user for Modern Forms integration.""" - return await 
self._handle_config_flow(user_input) + if user_input is None: + return self.async_show_form( + step_id="user", + data_schema=USER_SCHEMA, + ) + self.host = user_input[CONF_HOST] + return await self._handle_config_flow() async def async_step_zeroconf( self, discovery_info: zeroconf.ZeroconfServiceInfo @@ -44,40 +50,26 @@ class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): self.mac = discovery_info.properties.get(CONF_MAC) self.name = name - # Prepare configuration flow - return await self._handle_config_flow({}, True) + # Loop through self._handle_config_flow to ensure we load the + # MAC if it is missing, and abort if already configured + return await self._handle_config_flow(True) async def async_step_zeroconf_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initiated by zeroconf.""" - return await self._handle_config_flow(user_input) + return await self._handle_config_flow() async def _handle_config_flow( - self, user_input: dict[str, Any] | None = None, prepare: bool = False + self, initial_zeroconf: bool = False ) -> ConfigFlowResult: """Config flow handler for ModernForms.""" - # Request user input, unless we are preparing discovery flow - if user_input is None: - user_input = {} - if not prepare: - if self.source == SOURCE_ZEROCONF: - return self.async_show_form( - step_id="zeroconf_confirm", - description_placeholders={"name": self.name}, - ) - return self.async_show_form( - step_id="user", - data_schema=USER_SCHEMA, - ) - - if self.source == SOURCE_ZEROCONF: - user_input[CONF_HOST] = self.host - user_input[CONF_MAC] = self.mac - - if user_input.get(CONF_MAC) is None or not prepare: + if self.mac is None or not initial_zeroconf: + # User flow + # Or zeroconf without MAC + # Or zeroconf with MAC, but need to ensure device is still available session = async_get_clientsession(self.hass) - device = ModernFormsDevice(user_input[CONF_HOST], session=session) + device = ModernFormsDevice(self.host, session=session) 
try: device = await device.update() except ModernFormsConnectionError: @@ -88,20 +80,21 @@ class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): data_schema=USER_SCHEMA, errors={"base": "cannot_connect"}, ) - user_input[CONF_MAC] = device.info.mac_address + self.mac = device.info.mac_address + if self.source != SOURCE_ZEROCONF: + self.name = device.info.device_name # Check if already configured - await self.async_set_unique_id(user_input[CONF_MAC]) - self._abort_if_unique_id_configured(updates={CONF_HOST: user_input[CONF_HOST]}) + await self.async_set_unique_id(self.mac) + self._abort_if_unique_id_configured(updates={CONF_HOST: self.host}) - title = device.info.device_name - if self.source == SOURCE_ZEROCONF: - title = self.name - - if prepare: - return await self.async_step_zeroconf_confirm() + if initial_zeroconf: + return self.async_show_form( + step_id="zeroconf_confirm", + description_placeholders={"name": self.name}, + ) return self.async_create_entry( - title=title, - data={CONF_HOST: user_input[CONF_HOST], CONF_MAC: user_input[CONF_MAC]}, + title=self.name, + data={CONF_HOST: self.host, CONF_MAC: self.mac}, ) From af1222e97ba00eb1ebcec2049c25b77a70e064a1 Mon Sep 17 00:00:00 2001 From: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com> Date: Tue, 17 Dec 2024 19:31:25 +0100 Subject: [PATCH 372/677] Distinct sources per zone in Onkyo (#130547) --- .../components/onkyo/media_player.py | 70 +++++++++++++------ homeassistant/components/onkyo/strings.json | 5 ++ 2 files changed, 53 insertions(+), 22 deletions(-) diff --git a/homeassistant/components/onkyo/media_player.py b/homeassistant/components/onkyo/media_player.py index 24d63c0d9e4..76194672bb7 100644 --- a/homeassistant/components/onkyo/media_player.py +++ b/homeassistant/components/onkyo/media_player.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +from functools import cache import logging from typing import Any, Literal @@ -19,6 +20,7 @@ from 
homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback from homeassistant.data_entry_flow import FlowResultType +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import config_validation as cv, entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue @@ -128,10 +130,17 @@ VIDEO_INFORMATION_MAPPING = [ ] ISSUE_URL_PLACEHOLDER = "/config/integrations/dashboard/add?domain=onkyo" -type InputLibValue = str | tuple[str, ...] +type LibValue = str | tuple[str, ...] -def _input_lib_cmds(zone: str) -> dict[InputSource, InputLibValue]: +def _get_single_lib_value(value: LibValue) -> str: + if isinstance(value, str): + return value + return value[0] + + +@cache +def _input_source_lib_mappings(zone: str) -> dict[InputSource, LibValue]: match zone: case "main": cmds = PYEISCP_COMMANDS["main"]["SLI"] @@ -142,7 +151,7 @@ def _input_lib_cmds(zone: str) -> dict[InputSource, InputLibValue]: case "zone4": cmds = PYEISCP_COMMANDS["zone4"]["SL4"] - result: dict[InputSource, InputLibValue] = {} + result: dict[InputSource, LibValue] = {} for k, v in cmds["values"].items(): try: source = InputSource(k) @@ -153,6 +162,11 @@ def _input_lib_cmds(zone: str) -> dict[InputSource, InputLibValue]: return result +@cache +def _rev_input_source_lib_mappings(zone: str) -> dict[LibValue, InputSource]: + return {value: key for key, value in _input_source_lib_mappings(zone).items()} + + async def async_setup_platform( hass: HomeAssistant, config: ConfigType, @@ -164,7 +178,7 @@ async def async_setup_platform( source_mapping: dict[str, InputSource] = {} for zone in ZONES: - for source, source_lib in _input_lib_cmds(zone).items(): + for source, source_lib in _input_source_lib_mappings(zone).items(): if 
isinstance(source_lib, str): source_mapping.setdefault(source_lib, source) else: @@ -353,14 +367,18 @@ class OnkyoMediaPlayer(MediaPlayerEntity): self._volume_resolution = volume_resolution self._max_volume = max_volume - self._name_mapping = sources - self._reverse_name_mapping = {value: key for key, value in sources.items()} - self._lib_mapping = _input_lib_cmds(zone) - self._reverse_lib_mapping = { - value: key for key, value in self._lib_mapping.items() + self._source_lib_mapping = _input_source_lib_mappings(zone) + self._rev_source_lib_mapping = _rev_input_source_lib_mappings(zone) + self._source_mapping = { + key: value + for key, value in sources.items() + if key in self._source_lib_mapping + } + self._rev_source_mapping = { + value: key for key, value in self._source_mapping.items() } - self._attr_source_list = list(sources.values()) + self._attr_source_list = list(self._rev_source_mapping) self._attr_extra_state_attributes = {} async def async_added_to_hass(self) -> None: @@ -429,12 +447,18 @@ class OnkyoMediaPlayer(MediaPlayerEntity): async def async_select_source(self, source: str) -> None: """Select input source.""" - if self.source_list and source in self.source_list: - source_lib = self._lib_mapping[self._reverse_name_mapping[source]] - if isinstance(source_lib, str): - source_lib_single = source_lib - else: - source_lib_single = source_lib[0] + if not self.source_list or source not in self.source_list: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_source", + translation_placeholders={ + "invalid_source": source, + "entity_id": self.entity_id, + }, + ) + + source_lib = self._source_lib_mapping[self._rev_source_mapping[source]] + source_lib_single = _get_single_lib_value(source_lib) self._update_receiver( "input-selector" if self._zone == "main" else "selector", source_lib_single ) @@ -448,7 +472,7 @@ class OnkyoMediaPlayer(MediaPlayerEntity): ) -> None: """Play radio station by preset number.""" if self.source 
is not None: - source = self._reverse_name_mapping[self.source] + source = self._rev_source_mapping[self.source] if media_type.lower() == "radio" and source in DEFAULT_PLAYABLE_SOURCES: self._update_receiver("preset", media_id) @@ -520,15 +544,17 @@ class OnkyoMediaPlayer(MediaPlayerEntity): self.async_write_ha_state() @callback - def _parse_source(self, source_lib: InputLibValue) -> None: - source = self._reverse_lib_mapping[source_lib] - if source in self._name_mapping: - self._attr_source = self._name_mapping[source] + def _parse_source(self, source_lib: LibValue) -> None: + source = self._rev_source_lib_mapping[source_lib] + if source in self._source_mapping: + self._attr_source = self._source_mapping[source] return source_meaning = source.value_meaning _LOGGER.error( - 'Input source "%s" not in source list: %s', source_meaning, self.entity_id + 'Input source "%s" is invalid for entity: %s', + source_meaning, + self.entity_id, ) self._attr_source = source_meaning diff --git a/homeassistant/components/onkyo/strings.json b/homeassistant/components/onkyo/strings.json index 95ca1199a36..849171c7161 100644 --- a/homeassistant/components/onkyo/strings.json +++ b/homeassistant/components/onkyo/strings.json @@ -69,5 +69,10 @@ "title": "The Onkyo YAML configuration import failed", "description": "Configuring Onkyo using YAML is being removed but there was a connection error when importing your YAML configuration for host {host}.\n\nEnsure the connection to the receiver works and restart Home Assistant to try again or remove the Onkyo YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." } + }, + "exceptions": { + "invalid_source": { + "message": "Cannot select input source \"{invalid_source}\" for entity: {entity_id}." 
+ } } } From 633433709f0cf7744c2ec62e0a5cfcce68f5c120 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 17 Dec 2024 20:00:02 +0100 Subject: [PATCH 373/677] Clean up backups after manual backup (#133434) * Clean up backups after manual backup * Address review comments --- homeassistant/components/backup/config.py | 40 ++-- homeassistant/components/backup/manager.py | 6 +- tests/components/backup/conftest.py | 4 +- tests/components/backup/test_websocket.py | 261 +++++++++++++++++++++ 4 files changed, 289 insertions(+), 22 deletions(-) diff --git a/homeassistant/components/backup/config.py b/homeassistant/components/backup/config.py index 26ce691a4cc..ef21dc81ee5 100644 --- a/homeassistant/components/backup/config.py +++ b/homeassistant/components/backup/config.py @@ -323,25 +323,6 @@ class BackupSchedule: # and handled in the future LOGGER.exception("Unexpected error creating automatic backup") - # delete old backups more numerous than copies - - def _backups_filter( - backups: dict[str, ManagerBackup], - ) -> dict[str, ManagerBackup]: - """Return oldest backups more numerous than copies to delete.""" - # we need to check here since we await before - # this filter is applied - if config_data.retention.copies is None: - return {} - return dict( - sorted( - backups.items(), - key=lambda backup_item: backup_item[1].date, - )[: len(backups) - config_data.retention.copies] - ) - - await _delete_filtered_backups(manager, _backups_filter) - manager.remove_next_backup_event = async_track_point_in_time( manager.hass, _create_backup, next_time ) @@ -469,3 +450,24 @@ async def _delete_filtered_backups( "Error deleting old copies: %s", agent_errors, ) + + +async def delete_backups_exceeding_configured_count(manager: BackupManager) -> None: + """Delete backups exceeding the configured retention count.""" + + def _backups_filter( + backups: dict[str, ManagerBackup], + ) -> dict[str, ManagerBackup]: + """Return oldest backups more numerous than copies to delete.""" + # we 
need to check here since we await before + # this filter is applied + if manager.config.data.retention.copies is None: + return {} + return dict( + sorted( + backups.items(), + key=lambda backup_item: backup_item[1].date, + )[: len(backups) - manager.config.data.retention.copies] + ) + + await _delete_filtered_backups(manager, _backups_filter) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index 66977e568e4..d6abc299317 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -33,7 +33,7 @@ from .agent import ( BackupAgentPlatformProtocol, LocalBackupAgent, ) -from .config import BackupConfig +from .config import BackupConfig, delete_backups_exceeding_configured_count from .const import ( BUF_SIZE, DATA_MANAGER, @@ -750,6 +750,10 @@ class BackupManager: self.known_backups.add( written_backup.backup, agent_errors, with_strategy_settings ) + + # delete old backups more numerous than copies + await delete_backups_exceeding_configured_count(self) + self.async_on_backup_event( CreateBackupEvent(stage=None, state=CreateBackupState.COMPLETED) ) diff --git a/tests/components/backup/conftest.py b/tests/components/backup/conftest.py index 13f2537db47..ee855fb70f2 100644 --- a/tests/components/backup/conftest.py +++ b/tests/components/backup/conftest.py @@ -9,7 +9,7 @@ from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest -from homeassistant.components.backup.manager import WrittenBackup +from homeassistant.components.backup.manager import NewBackup, WrittenBackup from homeassistant.core import HomeAssistant from .common import TEST_BACKUP_PATH_ABC123 @@ -76,7 +76,7 @@ def mock_create_backup() -> Generator[AsyncMock]: with patch( "homeassistant.components.backup.CoreBackupReaderWriter.async_create_backup" ) as mock_create_backup: - mock_create_backup.return_value = (MagicMock(), fut) + mock_create_backup.return_value = 
(NewBackup(backup_job_id="abc123"), fut) yield mock_create_backup diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index 4a94689c19e..665512eca97 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -1637,6 +1637,267 @@ async def test_config_retention_copies_logic( ) +@pytest.mark.parametrize( + ("backup_command", "backup_time"), + [ + ( + {"type": "backup/generate_with_strategy_settings"}, + "2024-11-11T12:00:00+01:00", + ), + ( + {"type": "backup/generate", "agent_ids": ["test.test-agent"]}, + None, + ), + ], +) +@pytest.mark.parametrize( + ( + "config_command", + "backups", + "get_backups_agent_errors", + "delete_backup_agent_errors", + "backup_calls", + "get_backups_calls", + "delete_calls", + "delete_args_list", + ), + [ + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": None, "days": None}, + "schedule": "never", + }, + { + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {}, + 1, + 1, # we get backups even if backup retention copies is None + 0, + [], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 3, "days": None}, + "schedule": "never", + }, + { + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + 
), + "backup-3": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {}, + 1, + 1, + 0, + [], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 3, "days": None}, + "schedule": "never", + }, + { + "backup-1": MagicMock( + date="2024-11-09T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-5": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {}, + 1, + 1, + 1, + [call("backup-1")], + ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 2, "days": None}, + "schedule": "never", + }, + { + "backup-1": MagicMock( + date="2024-11-09T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-3": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-4": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=True, + spec=ManagerBackup, + ), + "backup-5": MagicMock( + date="2024-11-12T04:45:00+01:00", + with_strategy_settings=False, + spec=ManagerBackup, + ), + }, + {}, + {}, + 1, + 1, + 2, + [call("backup-1"), call("backup-2")], + ), + ], +) +async def 
test_config_retention_copies_logic_manual_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + hass_storage: dict[str, Any], + create_backup: AsyncMock, + delete_backup: AsyncMock, + get_backups: AsyncMock, + config_command: dict[str, Any], + backup_command: dict[str, Any], + backups: dict[str, Any], + get_backups_agent_errors: dict[str, Exception], + delete_backup_agent_errors: dict[str, Exception], + backup_time: str, + backup_calls: int, + get_backups_calls: int, + delete_calls: int, + delete_args_list: Any, +) -> None: + """Test config backup retention copies logic for manual backup.""" + client = await hass_ws_client(hass) + storage_data = { + "backups": {}, + "config": { + "create_backup": { + "agent_ids": ["test-agent"], + "include_addons": ["test-addon"], + "include_all_addons": False, + "include_database": True, + "include_folders": ["media"], + "name": "test-name", + "password": "test-password", + }, + "retention": {"copies": None, "days": None}, + "last_attempted_strategy_backup": None, + "last_completed_strategy_backup": None, + "schedule": {"state": "daily"}, + }, + } + hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + "version": 1, + } + get_backups.return_value = (backups, get_backups_agent_errors) + delete_backup.return_value = delete_backup_agent_errors + await hass.config.async_set_time_zone("Europe/Amsterdam") + freezer.move_to("2024-11-11 12:00:00+01:00") + + await setup_backup_integration(hass, remote_agents=["test-agent"]) + await hass.async_block_till_done() + + await client.send_json_auto_id(config_command) + result = await client.receive_json() + assert result["success"] + + # Create a manual backup + await client.send_json_auto_id(backup_command) + result = await client.receive_json() + assert result["success"] + + # Wait for backup creation to complete + await hass.async_block_till_done() + + assert create_backup.call_count == backup_calls + assert 
get_backups.call_count == get_backups_calls + assert delete_backup.call_count == delete_calls + assert delete_backup.call_args_list == delete_args_list + async_fire_time_changed(hass, fire_all=True) # flush out storage save + await hass.async_block_till_done() + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_attempted_strategy_backup"] + == backup_time + ) + assert ( + hass_storage[DOMAIN]["data"]["config"]["last_completed_strategy_backup"] + == backup_time + ) + + @pytest.mark.parametrize( ( "command", From d22668a1662beef164fa769be836f106d37263a6 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 17 Dec 2024 20:02:12 +0100 Subject: [PATCH 374/677] Don't run recorder data migration on new databases (#133412) * Don't run recorder data migration on new databases * Add tests --- homeassistant/components/recorder/core.py | 1 + .../components/recorder/migration.py | 107 +++++++-- tests/components/recorder/test_init.py | 9 +- tests/components/recorder/test_migrate.py | 2 + ..._migration_run_time_migrations_remember.py | 205 ++++++++++++++++-- .../components/recorder/test_v32_migration.py | 53 ++++- 6 files changed, 330 insertions(+), 47 deletions(-) diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index 76cf0a7c05e..9d9b70586a6 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -970,6 +970,7 @@ class Recorder(threading.Thread): # which does not need migration or repair. 
new_schema_status = migration.SchemaValidationStatus( current_version=SCHEMA_VERSION, + initial_version=SCHEMA_VERSION, migration_needed=False, non_live_data_migration_needed=False, schema_errors=set(), diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index b28ca4399c8..74e3b08f51c 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -180,7 +180,27 @@ def raise_if_exception_missing_str(ex: Exception, match_substrs: Iterable[str]) raise ex -def _get_schema_version(session: Session) -> int | None: +def _get_initial_schema_version(session: Session) -> int | None: + """Get the schema version the database was created with.""" + res = ( + session.query(SchemaChanges.schema_version) + .order_by(SchemaChanges.change_id.asc()) + .first() + ) + return getattr(res, "schema_version", None) + + +def get_initial_schema_version(session_maker: Callable[[], Session]) -> int | None: + """Get the schema version the database was created with.""" + try: + with session_scope(session=session_maker(), read_only=True) as session: + return _get_initial_schema_version(session) + except Exception: + _LOGGER.exception("Error when determining DB schema version") + return None + + +def _get_current_schema_version(session: Session) -> int | None: """Get the schema version.""" res = ( session.query(SchemaChanges.schema_version) @@ -190,11 +210,11 @@ def _get_schema_version(session: Session) -> int | None: return getattr(res, "schema_version", None) -def get_schema_version(session_maker: Callable[[], Session]) -> int | None: +def get_current_schema_version(session_maker: Callable[[], Session]) -> int | None: """Get the schema version.""" try: with session_scope(session=session_maker(), read_only=True) as session: - return _get_schema_version(session) + return _get_current_schema_version(session) except Exception: _LOGGER.exception("Error when determining DB schema version") 
return None @@ -205,6 +225,7 @@ class SchemaValidationStatus: """Store schema validation status.""" current_version: int + initial_version: int migration_needed: bool non_live_data_migration_needed: bool schema_errors: set[str] @@ -227,8 +248,9 @@ def validate_db_schema( """ schema_errors: set[str] = set() - current_version = get_schema_version(session_maker) - if current_version is None: + current_version = get_current_schema_version(session_maker) + initial_version = get_initial_schema_version(session_maker) + if current_version is None or initial_version is None: return None if is_current := _schema_is_current(current_version): @@ -238,11 +260,15 @@ def validate_db_schema( schema_migration_needed = not is_current _non_live_data_migration_needed = non_live_data_migration_needed( - instance, session_maker, current_version + instance, + session_maker, + initial_schema_version=initial_version, + start_schema_version=current_version, ) return SchemaValidationStatus( current_version=current_version, + initial_version=initial_version, non_live_data_migration_needed=_non_live_data_migration_needed, migration_needed=schema_migration_needed or _non_live_data_migration_needed, schema_errors=schema_errors, @@ -377,17 +403,26 @@ def _get_migration_changes(session: Session) -> dict[str, int]: def non_live_data_migration_needed( instance: Recorder, session_maker: Callable[[], Session], - schema_version: int, + *, + initial_schema_version: int, + start_schema_version: int, ) -> bool: """Return True if non-live data migration is needed. + :param initial_schema_version: The schema version the database was created with. + :param start_schema_version: The schema version when starting the migration. + This must only be called if database schema is current. 
""" migration_needed = False with session_scope(session=session_maker()) as session: migration_changes = _get_migration_changes(session) for migrator_cls in NON_LIVE_DATA_MIGRATORS: - migrator = migrator_cls(schema_version, migration_changes) + migrator = migrator_cls( + initial_schema_version=initial_schema_version, + start_schema_version=start_schema_version, + migration_changes=migration_changes, + ) migration_needed |= migrator.needs_migrate(instance, session) return migration_needed @@ -406,7 +441,11 @@ def migrate_data_non_live( migration_changes = _get_migration_changes(session) for migrator_cls in NON_LIVE_DATA_MIGRATORS: - migrator = migrator_cls(schema_status.start_version, migration_changes) + migrator = migrator_cls( + initial_schema_version=schema_status.initial_version, + start_schema_version=schema_status.start_version, + migration_changes=migration_changes, + ) migrator.migrate_all(instance, session_maker) @@ -423,7 +462,11 @@ def migrate_data_live( migration_changes = _get_migration_changes(session) for migrator_cls in LIVE_DATA_MIGRATORS: - migrator = migrator_cls(schema_status.start_version, migration_changes) + migrator = migrator_cls( + initial_schema_version=schema_status.initial_version, + start_schema_version=schema_status.start_version, + migration_changes=migration_changes, + ) migrator.queue_migration(instance, session) @@ -2233,7 +2276,7 @@ def initialize_database(session_maker: Callable[[], Session]) -> bool: """Initialize a new database.""" try: with session_scope(session=session_maker(), read_only=True) as session: - if _get_schema_version(session) is not None: + if _get_current_schema_version(session) is not None: return True with session_scope(session=session_maker()) as session: @@ -2277,13 +2320,25 @@ class BaseMigration(ABC): """Base class for migrations.""" index_to_drop: tuple[str, str] | None = None - required_schema_version = 0 + required_schema_version = 0 # Schema version required to run migration queries + 
max_initial_schema_version: int # Skip migration if db created after this version migration_version = 1 migration_id: str - def __init__(self, schema_version: int, migration_changes: dict[str, int]) -> None: - """Initialize a new BaseRunTimeMigration.""" - self.schema_version = schema_version + def __init__( + self, + *, + initial_schema_version: int, + start_schema_version: int, + migration_changes: dict[str, int], + ) -> None: + """Initialize a new BaseRunTimeMigration. + + :param initial_schema_version: The schema version the database was created with. + :param start_schema_version: The schema version when starting the migration. + """ + self.initial_schema_version = initial_schema_version + self.start_schema_version = start_schema_version self.migration_changes = migration_changes @abstractmethod @@ -2324,7 +2379,15 @@ class BaseMigration(ABC): mark the migration as done in the database if its not already marked as done. """ - if self.schema_version < self.required_schema_version: + if self.initial_schema_version > self.max_initial_schema_version: + _LOGGER.debug( + "Data migration '%s' not needed, database created with version %s " + "after migrator was added", + self.migration_id, + self.initial_schema_version, + ) + return False + if self.start_schema_version < self.required_schema_version: # Schema is too old, we must have to migrate _LOGGER.info( "Data migration '%s' needed, schema too old", self.migration_id @@ -2426,6 +2489,7 @@ class StatesContextIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): """Migration to migrate states context_ids to binary format.""" required_schema_version = CONTEXT_ID_AS_BINARY_SCHEMA_VERSION + max_initial_schema_version = CONTEXT_ID_AS_BINARY_SCHEMA_VERSION - 1 migration_id = "state_context_id_as_binary" migration_version = 2 index_to_drop = ("states", "ix_states_context_id") @@ -2469,6 +2533,7 @@ class EventsContextIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): """Migration to migrate events context_ids 
to binary format.""" required_schema_version = CONTEXT_ID_AS_BINARY_SCHEMA_VERSION + max_initial_schema_version = CONTEXT_ID_AS_BINARY_SCHEMA_VERSION - 1 migration_id = "event_context_id_as_binary" migration_version = 2 index_to_drop = ("events", "ix_events_context_id") @@ -2512,6 +2577,7 @@ class EventTypeIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): """Migration to migrate event_type to event_type_ids.""" required_schema_version = EVENT_TYPE_IDS_SCHEMA_VERSION + max_initial_schema_version = EVENT_TYPE_IDS_SCHEMA_VERSION - 1 migration_id = "event_type_id_migration" def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: @@ -2581,6 +2647,7 @@ class EntityIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): """Migration to migrate entity_ids to states_meta.""" required_schema_version = STATES_META_SCHEMA_VERSION + max_initial_schema_version = STATES_META_SCHEMA_VERSION - 1 migration_id = "entity_id_migration" def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: @@ -2660,6 +2727,7 @@ class EventIDPostMigration(BaseRunTimeMigration): """Migration to remove old event_id index from states.""" migration_id = "event_id_post_migration" + max_initial_schema_version = LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION - 1 task = MigrationTask migration_version = 2 @@ -2728,7 +2796,7 @@ class EventIDPostMigration(BaseRunTimeMigration): self, instance: Recorder, session: Session ) -> DataMigrationStatus: """Return if the migration needs to run.""" - if self.schema_version <= LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION: + if self.start_schema_version <= LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION: return DataMigrationStatus(needs_migrate=False, migration_done=False) if get_index_by_name( session, TABLE_STATES, LEGACY_STATES_EVENT_ID_INDEX @@ -2745,6 +2813,7 @@ class EntityIDPostMigration(BaseMigrationWithQuery, BaseOffLineMigration): """ migration_id = "entity_id_post_migration" + max_initial_schema_version = 
STATES_META_SCHEMA_VERSION - 1 index_to_drop = (TABLE_STATES, LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX) def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: @@ -2758,8 +2827,8 @@ class EntityIDPostMigration(BaseMigrationWithQuery, BaseOffLineMigration): NON_LIVE_DATA_MIGRATORS: tuple[type[BaseOffLineMigration], ...] = ( - StatesContextIDMigration, # Introduced in HA Core 2023.4 - EventsContextIDMigration, # Introduced in HA Core 2023.4 + StatesContextIDMigration, # Introduced in HA Core 2023.4 by PR #88942 + EventsContextIDMigration, # Introduced in HA Core 2023.4 by PR #88942 EventTypeIDMigration, # Introduced in HA Core 2023.4 by PR #89465 EntityIDMigration, # Introduced in HA Core 2023.4 by PR #89557 EntityIDPostMigration, # Introduced in HA Core 2023.4 by PR #89557 diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index d16712e0c70..7e5abf1b514 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -964,12 +964,17 @@ async def test_recorder_setup_failure(hass: HomeAssistant) -> None: hass.stop() -async def test_recorder_validate_schema_failure(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + "function_to_patch", ["_get_current_schema_version", "_get_initial_schema_version"] +) +async def test_recorder_validate_schema_failure( + hass: HomeAssistant, function_to_patch: str +) -> None: """Test some exceptions.""" recorder_helper.async_initialize_recorder(hass) with ( patch( - "homeassistant.components.recorder.migration._get_schema_version" + f"homeassistant.components.recorder.migration.{function_to_patch}" ) as inspect_schema_version, patch("homeassistant.components.recorder.core.time.sleep"), ): diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index 14978bee5a9..462db70496a 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -97,6 +97,7 @@ 
async def test_schema_update_calls( session_maker, migration.SchemaValidationStatus( current_version=0, + initial_version=0, migration_needed=True, non_live_data_migration_needed=True, schema_errors=set(), @@ -111,6 +112,7 @@ async def test_schema_update_calls( session_maker, migration.SchemaValidationStatus( current_version=42, + initial_version=0, migration_needed=True, non_live_data_migration_needed=True, schema_errors=set(), diff --git a/tests/components/recorder/test_migration_run_time_migrations_remember.py b/tests/components/recorder/test_migration_run_time_migrations_remember.py index 7a333b0a2f5..fa14570bc6b 100644 --- a/tests/components/recorder/test_migration_run_time_migrations_remember.py +++ b/tests/components/recorder/test_migration_run_time_migrations_remember.py @@ -1,8 +1,9 @@ """Test run time migrations are remembered in the migration_changes table.""" +from collections.abc import Callable import importlib import sys -from unittest.mock import patch +from unittest.mock import Mock, patch import pytest from sqlalchemy import create_engine @@ -10,6 +11,7 @@ from sqlalchemy.orm import Session from homeassistant.components import recorder from homeassistant.components.recorder import core, migration, statistics +from homeassistant.components.recorder.db_schema import SCHEMA_VERSION from homeassistant.components.recorder.migration import MigrationTask from homeassistant.components.recorder.queries import get_migration_changes from homeassistant.components.recorder.util import ( @@ -25,7 +27,8 @@ from tests.common import async_test_home_assistant from tests.typing import RecorderInstanceGenerator CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine" -SCHEMA_MODULE = "tests.components.recorder.db_schema_32" +SCHEMA_MODULE_32 = "tests.components.recorder.db_schema_32" +SCHEMA_MODULE_CURRENT = "homeassistant.components.recorder.db_schema" @pytest.fixture @@ -46,26 +49,190 @@ def _get_migration_id(hass: HomeAssistant) -> dict[str, 
int]: return dict(execute_stmt_lambda_element(session, get_migration_changes())) -def _create_engine_test(*args, **kwargs): +def _create_engine_test( + schema_module: str, *, initial_version: int | None = None +) -> Callable: """Test version of create_engine that initializes with old schema. This simulates an existing db with the old schema. """ - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] - engine = create_engine(*args, **kwargs) - old_db_schema.Base.metadata.create_all(engine) - with Session(engine) as session: - session.add( - recorder.db_schema.StatisticsRuns(start=statistics.get_start_time()) - ) - session.add( - recorder.db_schema.SchemaChanges( - schema_version=old_db_schema.SCHEMA_VERSION + + def _create_engine_test(*args, **kwargs): + """Test version of create_engine that initializes with old schema. + + This simulates an existing db with the old schema. + """ + importlib.import_module(schema_module) + old_db_schema = sys.modules[schema_module] + engine = create_engine(*args, **kwargs) + old_db_schema.Base.metadata.create_all(engine) + with Session(engine) as session: + session.add( + recorder.db_schema.StatisticsRuns(start=statistics.get_start_time()) ) + if initial_version is not None: + session.add( + recorder.db_schema.SchemaChanges(schema_version=initial_version) + ) + session.add( + recorder.db_schema.SchemaChanges( + schema_version=old_db_schema.SCHEMA_VERSION + ) + ) + session.commit() + return engine + + return _create_engine_test + + +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +@pytest.mark.parametrize( + ("initial_version", "expected_migrator_calls"), + [ + ( + 27, + { + "state_context_id_as_binary": 1, + "event_context_id_as_binary": 1, + "event_type_id_migration": 1, + "entity_id_migration": 1, + "event_id_post_migration": 1, + "entity_id_post_migration": 1, + }, + ), + ( + 28, + { + "state_context_id_as_binary": 1, + "event_context_id_as_binary": 1, + 
"event_type_id_migration": 1, + "entity_id_migration": 1, + "event_id_post_migration": 0, + "entity_id_post_migration": 1, + }, + ), + ( + 36, + { + "state_context_id_as_binary": 0, + "event_context_id_as_binary": 0, + "event_type_id_migration": 1, + "entity_id_migration": 1, + "event_id_post_migration": 0, + "entity_id_post_migration": 1, + }, + ), + ( + 37, + { + "state_context_id_as_binary": 0, + "event_context_id_as_binary": 0, + "event_type_id_migration": 0, + "entity_id_migration": 1, + "event_id_post_migration": 0, + "entity_id_post_migration": 1, + }, + ), + ( + 38, + { + "state_context_id_as_binary": 0, + "event_context_id_as_binary": 0, + "event_type_id_migration": 0, + "entity_id_migration": 0, + "event_id_post_migration": 0, + "entity_id_post_migration": 0, + }, + ), + ( + SCHEMA_VERSION, + { + "state_context_id_as_binary": 0, + "event_context_id_as_binary": 0, + "event_type_id_migration": 0, + "entity_id_migration": 0, + "event_id_post_migration": 0, + "entity_id_post_migration": 0, + }, + ), + ], +) +async def test_data_migrator_new_database( + async_test_recorder: RecorderInstanceGenerator, + initial_version: int, + expected_migrator_calls: dict[str, int], +) -> None: + """Test that the data migrators are not executed on a new database.""" + config = {recorder.CONF_COMMIT_INTERVAL: 1} + + def needs_migrate_mock() -> Mock: + return Mock( + spec_set=[], + return_value=migration.DataMigrationStatus( + needs_migrate=False, migration_done=True + ), ) - session.commit() - return engine + + migrator_mocks = { + "state_context_id_as_binary": needs_migrate_mock(), + "event_context_id_as_binary": needs_migrate_mock(), + "event_type_id_migration": needs_migrate_mock(), + "entity_id_migration": needs_migrate_mock(), + "event_id_post_migration": needs_migrate_mock(), + "entity_id_post_migration": needs_migrate_mock(), + } + + with ( + patch.object( + migration.StatesContextIDMigration, + "needs_migrate_impl", + 
side_effect=migrator_mocks["state_context_id_as_binary"], + ), + patch.object( + migration.EventsContextIDMigration, + "needs_migrate_impl", + side_effect=migrator_mocks["event_context_id_as_binary"], + ), + patch.object( + migration.EventTypeIDMigration, + "needs_migrate_impl", + side_effect=migrator_mocks["event_type_id_migration"], + ), + patch.object( + migration.EntityIDMigration, + "needs_migrate_impl", + side_effect=migrator_mocks["entity_id_migration"], + ), + patch.object( + migration.EventIDPostMigration, + "needs_migrate_impl", + side_effect=migrator_mocks["event_id_post_migration"], + ), + patch.object( + migration.EntityIDPostMigration, + "needs_migrate_impl", + side_effect=migrator_mocks["entity_id_post_migration"], + ), + patch( + CREATE_ENGINE_TARGET, + new=_create_engine_test( + SCHEMA_MODULE_CURRENT, initial_version=initial_version + ), + ), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass, config), + ): + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await _async_wait_migration_done(hass) + hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) + await hass.async_block_till_done() + await hass.async_stop() + + for migrator, mock in migrator_mocks.items(): + assert len(mock.mock_calls) == expected_migrator_calls[migrator] @pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) @@ -84,8 +251,8 @@ async def test_migration_changes_prevent_trying_to_migrate_again( """ config = {recorder.CONF_COMMIT_INTERVAL: 1} - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] # Start with db schema that needs migration (version 32) with ( @@ -98,7 +265,7 @@ async def test_migration_changes_prevent_trying_to_migrate_again( patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch.object(core, "StateAttributes", 
old_db_schema.StateAttributes), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), ): async with ( async_test_home_assistant() as hass, diff --git a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index d59486b61f0..21f7037c370 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -30,7 +30,9 @@ SCHEMA_MODULE_30 = "tests.components.recorder.db_schema_30" SCHEMA_MODULE_32 = "tests.components.recorder.db_schema_32" -def _create_engine_test(schema_module: str) -> Callable: +def _create_engine_test( + schema_module: str, *, initial_version: int | None = None +) -> Callable: """Test version of create_engine that initializes with old schema. This simulates an existing db with the old schema. @@ -49,6 +51,10 @@ def _create_engine_test(schema_module: str) -> Callable: session.add( recorder.db_schema.StatisticsRuns(start=statistics.get_start_time()) ) + if initial_version is not None: + session.add( + recorder.db_schema.SchemaChanges(schema_version=initial_version) + ) session.add( recorder.db_schema.SchemaChanges( schema_version=old_db_schema.SCHEMA_VERSION @@ -70,7 +76,10 @@ async def test_migrate_times( async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, ) -> None: - """Test we can migrate times in the events and states tables.""" + """Test we can migrate times in the events and states tables. + + Also tests entity id post migration. 
+ """ importlib.import_module(SCHEMA_MODULE_30) old_db_schema = sys.modules[SCHEMA_MODULE_30] now = dt_util.utcnow() @@ -122,7 +131,13 @@ async def test_migrate_times( patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_30)), + patch( + CREATE_ENGINE_TARGET, + new=_create_engine_test( + SCHEMA_MODULE_30, + initial_version=27, # Set to 27 for the entity id post migration to run + ), + ), ): async with ( async_test_home_assistant() as hass, @@ -274,7 +289,13 @@ async def test_migrate_can_resume_entity_id_post_migration( patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), + patch( + CREATE_ENGINE_TARGET, + new=_create_engine_test( + SCHEMA_MODULE_32, + initial_version=27, # Set to 27 for the entity id post migration to run + ), + ), ): async with ( async_test_home_assistant() as hass, @@ -394,7 +415,13 @@ async def test_migrate_can_resume_ix_states_event_id_removed( patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), + patch( + CREATE_ENGINE_TARGET, + new=_create_engine_test( + SCHEMA_MODULE_32, + initial_version=27, # Set to 27 for the entity id post migration to run + ), + ), ): async with ( async_test_home_assistant() as hass, @@ -527,7 +554,13 @@ async def test_out_of_disk_space_while_rebuild_states_table( patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, 
new=_create_engine_test(SCHEMA_MODULE_32)), + patch( + CREATE_ENGINE_TARGET, + new=_create_engine_test( + SCHEMA_MODULE_32, + initial_version=27, # Set to 27 for the entity id post migration to run + ), + ), ): async with ( async_test_home_assistant() as hass, @@ -705,7 +738,13 @@ async def test_out_of_disk_space_while_removing_foreign_key( patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), + patch( + CREATE_ENGINE_TARGET, + new=_create_engine_test( + SCHEMA_MODULE_32, + initial_version=27, # Set to 27 for the entity id post migration to run + ), + ), ): async with ( async_test_home_assistant() as hass, From e9e8228f07bac1a12cb6e256a5f9c9d13a5357fa Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Tue, 17 Dec 2024 20:18:16 +0100 Subject: [PATCH 375/677] Improve empty state handling for SomfyThermostat in Overkiz (#131700) --- .../overkiz/climate/somfy_thermostat.py | 21 +++++++++---------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/overkiz/climate/somfy_thermostat.py b/homeassistant/components/overkiz/climate/somfy_thermostat.py index 66a04af4e7a..d2aa1658302 100644 --- a/homeassistant/components/overkiz/climate/somfy_thermostat.py +++ b/homeassistant/components/overkiz/climate/somfy_thermostat.py @@ -57,10 +57,7 @@ class SomfyThermostat(OverkizEntity, ClimateEntity): _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_supported_features = ( - ClimateEntityFeature.PRESET_MODE - | ClimateEntityFeature.TARGET_TEMPERATURE - | ClimateEntityFeature.TURN_OFF - | ClimateEntityFeature.TURN_ON + ClimateEntityFeature.PRESET_MODE | ClimateEntityFeature.TARGET_TEMPERATURE ) _attr_hvac_modes = [*HVAC_MODES_TO_OVERKIZ] _attr_preset_modes = [*PRESET_MODES_TO_OVERKIZ] @@ -82,11 +79,12 @@ class SomfyThermostat(OverkizEntity, ClimateEntity): 
@property def hvac_mode(self) -> HVACMode: """Return hvac operation ie. heat, cool mode.""" - return OVERKIZ_TO_HVAC_MODES[ - cast( - str, self.executor.select_state(OverkizState.CORE_DEROGATION_ACTIVATION) - ) - ] + if derogation_activation := self.executor.select_state( + OverkizState.CORE_DEROGATION_ACTIVATION + ): + return OVERKIZ_TO_HVAC_MODES[cast(str, derogation_activation)] + + return HVACMode.AUTO @property def preset_mode(self) -> str: @@ -96,9 +94,10 @@ class SomfyThermostat(OverkizEntity, ClimateEntity): else: state_key = OverkizState.SOMFY_THERMOSTAT_DEROGATION_HEATING_MODE - state = cast(str, self.executor.select_state(state_key)) + if state := self.executor.select_state(state_key): + return OVERKIZ_TO_PRESET_MODES[OverkizCommandParam(cast(str, state))] - return OVERKIZ_TO_PRESET_MODES[OverkizCommandParam(state)] + return PRESET_NONE @property def current_temperature(self) -> float | None: From d785c4b0b1eb6a8a8c57cb80c06d4a367e4bcc7c Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 17 Dec 2024 20:20:26 +0100 Subject: [PATCH 376/677] Add optional category in OptionsFlow to holiday (#129514) --- homeassistant/components/holiday/__init__.py | 11 +- homeassistant/components/holiday/calendar.py | 24 ++- .../components/holiday/config_flow.py | 178 +++++++++++++----- homeassistant/components/holiday/const.py | 1 + homeassistant/components/holiday/strings.json | 54 +++++- tests/components/holiday/test_config_flow.py | 151 +++++++++++++-- 6 files changed, 350 insertions(+), 69 deletions(-) diff --git a/homeassistant/components/holiday/__init__.py b/homeassistant/components/holiday/__init__.py index c9a58f29215..b364f2c67a4 100644 --- a/homeassistant/components/holiday/__init__.py +++ b/homeassistant/components/holiday/__init__.py @@ -11,7 +11,7 @@ from homeassistant.const import CONF_COUNTRY, Platform from homeassistant.core import HomeAssistant from homeassistant.setup import SetupPhases, async_pause_setup -from .const import CONF_PROVINCE +from .const 
import CONF_CATEGORIES, CONF_PROVINCE PLATFORMS: list[Platform] = [Platform.CALENDAR] @@ -20,6 +20,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Holiday from a config entry.""" country: str = entry.data[CONF_COUNTRY] province: str | None = entry.data.get(CONF_PROVINCE) + categories: list[str] | None = entry.options.get(CONF_CATEGORIES) # We only import here to ensure that that its not imported later # in the event loop since the platforms will call country_holidays @@ -29,14 +30,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # the holidays library and it is not thread safe to import it in parallel # https://github.com/python/cpython/issues/83065 await hass.async_add_import_executor_job( - partial(country_holidays, country, subdiv=province) + partial(country_holidays, country, subdiv=province, categories=categories) ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_on_unload(entry.add_update_listener(update_listener)) return True +async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) + + async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/holiday/calendar.py b/homeassistant/components/holiday/calendar.py index 6a336870857..6dccd972164 100644 --- a/homeassistant/components/holiday/calendar.py +++ b/homeassistant/components/holiday/calendar.py @@ -4,7 +4,7 @@ from __future__ import annotations from datetime import datetime, timedelta -from holidays import HolidayBase, country_holidays +from holidays import PUBLIC, HolidayBase, country_holidays from homeassistant.components.calendar import CalendarEntity, CalendarEvent from homeassistant.config_entries import ConfigEntry @@ 
-15,18 +15,27 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.util import dt as dt_util -from .const import CONF_PROVINCE, DOMAIN +from .const import CONF_CATEGORIES, CONF_PROVINCE, DOMAIN def _get_obj_holidays_and_language( - country: str, province: str | None, language: str + country: str, + province: str | None, + language: str, + selected_categories: list[str] | None, ) -> tuple[HolidayBase, str]: """Get the object for the requested country and year.""" + if selected_categories is None: + categories = [PUBLIC] + else: + categories = [PUBLIC, *selected_categories] + obj_holidays = country_holidays( country, subdiv=province, years={dt_util.now().year, dt_util.now().year + 1}, language=language, + categories=categories, ) if language == "en": for lang in obj_holidays.supported_languages: @@ -36,6 +45,7 @@ def _get_obj_holidays_and_language( subdiv=province, years={dt_util.now().year, dt_util.now().year + 1}, language=lang, + categories=categories, ) language = lang break @@ -49,6 +59,7 @@ def _get_obj_holidays_and_language( subdiv=province, years={dt_util.now().year, dt_util.now().year + 1}, language=default_language, + categories=categories, ) language = default_language @@ -63,10 +74,11 @@ async def async_setup_entry( """Set up the Holiday Calendar config entry.""" country: str = config_entry.data[CONF_COUNTRY] province: str | None = config_entry.data.get(CONF_PROVINCE) + categories: list[str] | None = config_entry.options.get(CONF_CATEGORIES) language = hass.config.language obj_holidays, language = await hass.async_add_executor_job( - _get_obj_holidays_and_language, country, province, language + _get_obj_holidays_and_language, country, province, language, categories ) async_add_entities( @@ -76,6 +88,7 @@ async def async_setup_entry( country, province, language, + categories, obj_holidays, config_entry.entry_id, ) @@ -99,6 +112,7 @@ class 
HolidayCalendarEntity(CalendarEntity): country: str, province: str | None, language: str, + categories: list[str] | None, obj_holidays: HolidayBase, unique_id: str, ) -> None: @@ -107,6 +121,7 @@ class HolidayCalendarEntity(CalendarEntity): self._province = province self._location = name self._language = language + self._categories = categories self._attr_unique_id = unique_id self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, unique_id)}, @@ -172,6 +187,7 @@ class HolidayCalendarEntity(CalendarEntity): subdiv=self._province, years=list({start_date.year, end_date.year}), language=self._language, + categories=self._categories, ) event_list: list[CalendarEvent] = [] diff --git a/homeassistant/components/holiday/config_flow.py b/homeassistant/components/holiday/config_flow.py index 27b13e34851..00a71351ca7 100644 --- a/homeassistant/components/holiday/config_flow.py +++ b/homeassistant/components/holiday/config_flow.py @@ -5,11 +5,17 @@ from __future__ import annotations from typing import Any from babel import Locale, UnknownLocaleError -from holidays import list_supported_countries +from holidays import PUBLIC, country_holidays, list_supported_countries import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_COUNTRY +from homeassistant.core import callback from homeassistant.helpers.selector import ( CountrySelector, CountrySelectorConfig, @@ -17,12 +23,47 @@ from homeassistant.helpers.selector import ( SelectSelectorConfig, SelectSelectorMode, ) +from homeassistant.util import dt as dt_util -from .const import CONF_PROVINCE, DOMAIN +from .const import CONF_CATEGORIES, CONF_PROVINCE, DOMAIN SUPPORTED_COUNTRIES = list_supported_countries(include_aliases=False) +def get_optional_categories(country: str) -> list[str]: + """Return the country categories. 
+ + public holidays are always included so they + don't need to be presented to the user. + """ + country_data = country_holidays(country, years=dt_util.utcnow().year) + return [ + category for category in country_data.supported_categories if category != PUBLIC + ] + + +def get_options_schema(country: str) -> vol.Schema: + """Return the options schema.""" + schema = {} + if provinces := SUPPORTED_COUNTRIES[country]: + schema[vol.Optional(CONF_PROVINCE)] = SelectSelector( + SelectSelectorConfig( + options=provinces, + mode=SelectSelectorMode.DROPDOWN, + ) + ) + if categories := get_optional_categories(country): + schema[vol.Optional(CONF_CATEGORIES)] = SelectSelector( + SelectSelectorConfig( + options=categories, + multiple=True, + mode=SelectSelectorMode.DROPDOWN, + translation_key="categories", + ) + ) + return vol.Schema(schema) + + class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Holiday.""" @@ -32,6 +73,12 @@ class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): """Initialize the config flow.""" self.data: dict[str, Any] = {} + @staticmethod + @callback + def async_get_options_flow(config_entry: ConfigEntry) -> HolidayOptionsFlowHandler: + """Get the options flow for this handler.""" + return HolidayOptionsFlowHandler() + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -41,8 +88,11 @@ class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): selected_country = user_input[CONF_COUNTRY] - if SUPPORTED_COUNTRIES[selected_country]: - return await self.async_step_province() + options_schema = await self.hass.async_add_executor_job( + get_options_schema, selected_country + ) + if options_schema.schema: + return await self.async_step_options() self._async_abort_entries_match({CONF_COUNTRY: user_input[CONF_COUNTRY]}) @@ -67,24 +117,22 @@ class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): } ) - return self.async_show_form(step_id="user", data_schema=user_schema) + return 
self.async_show_form(data_schema=user_schema) - async def async_step_province( + async def async_step_options( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Handle the province step.""" + """Handle the options step.""" if user_input is not None: - combined_input: dict[str, Any] = {**self.data, **user_input} + country = self.data[CONF_COUNTRY] + data = {CONF_COUNTRY: country} + options: dict[str, Any] | None = None + if province := user_input.get(CONF_PROVINCE): + data[CONF_PROVINCE] = province + if categories := user_input.get(CONF_CATEGORIES): + options = {CONF_CATEGORIES: categories} - country = combined_input[CONF_COUNTRY] - province = combined_input.get(CONF_PROVINCE) - - self._async_abort_entries_match( - { - CONF_COUNTRY: country, - CONF_PROVINCE: province, - } - ) + self._async_abort_entries_match({**data, **(options or {})}) try: locale = Locale.parse(self.hass.config.language, sep="-") @@ -95,38 +143,33 @@ class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): province_str = f", {province}" if province else "" name = f"{locale.territories[country]}{province_str}" - return self.async_create_entry(title=name, data=combined_input) + return self.async_create_entry(title=name, data=data, options=options) - province_schema = vol.Schema( - { - vol.Optional(CONF_PROVINCE): SelectSelector( - SelectSelectorConfig( - options=SUPPORTED_COUNTRIES[self.data[CONF_COUNTRY]], - mode=SelectSelectorMode.DROPDOWN, - ) - ), - } + options_schema = await self.hass.async_add_executor_job( + get_options_schema, self.data[CONF_COUNTRY] + ) + return self.async_show_form( + step_id="options", + data_schema=options_schema, + description_placeholders={CONF_COUNTRY: self.data[CONF_COUNTRY]}, ) - - return self.async_show_form(step_id="province", data_schema=province_schema) async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Handle the re-configuration of a province.""" + """Handle the re-configuration of 
the options.""" reconfigure_entry = self._get_reconfigure_entry() + if user_input is not None: - combined_input: dict[str, Any] = {**reconfigure_entry.data, **user_input} + country = reconfigure_entry.data[CONF_COUNTRY] + data = {CONF_COUNTRY: country} + options: dict[str, Any] | None = None + if province := user_input.get(CONF_PROVINCE): + data[CONF_PROVINCE] = province + if categories := user_input.get(CONF_CATEGORIES): + options = {CONF_CATEGORIES: categories} - country = combined_input[CONF_COUNTRY] - province = combined_input.get(CONF_PROVINCE) - - self._async_abort_entries_match( - { - CONF_COUNTRY: country, - CONF_PROVINCE: province, - } - ) + self._async_abort_entries_match({**data, **(options or {})}) try: locale = Locale.parse(self.hass.config.language, sep="-") @@ -137,21 +180,60 @@ class HolidayConfigFlow(ConfigFlow, domain=DOMAIN): province_str = f", {province}" if province else "" name = f"{locale.territories[country]}{province_str}" + if options: + return self.async_update_reload_and_abort( + reconfigure_entry, title=name, data=data, options=options + ) return self.async_update_reload_and_abort( - reconfigure_entry, title=name, data=combined_input + reconfigure_entry, title=name, data=data ) - province_schema = vol.Schema( + options_schema = await self.hass.async_add_executor_job( + get_options_schema, reconfigure_entry.data[CONF_COUNTRY] + ) + + return self.async_show_form( + data_schema=options_schema, + description_placeholders={ + CONF_COUNTRY: reconfigure_entry.data[CONF_COUNTRY] + }, + ) + + +class HolidayOptionsFlowHandler(OptionsFlow): + """Handle Holiday options.""" + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage Holiday options.""" + if user_input is not None: + return self.async_create_entry(data=user_input) + + categories = await self.hass.async_add_executor_job( + get_optional_categories, self.config_entry.data[CONF_COUNTRY] + ) + if not categories: + return 
self.async_abort(reason="no_categories") + + schema = vol.Schema( { - vol.Optional(CONF_PROVINCE): SelectSelector( + vol.Optional(CONF_CATEGORIES): SelectSelector( SelectSelectorConfig( - options=SUPPORTED_COUNTRIES[ - reconfigure_entry.data[CONF_COUNTRY] - ], + options=categories, + multiple=True, mode=SelectSelectorMode.DROPDOWN, + translation_key="categories", ) ) } ) - return self.async_show_form(step_id="reconfigure", data_schema=province_schema) + return self.async_show_form( + data_schema=self.add_suggested_values_to_schema( + schema, self.config_entry.options + ), + description_placeholders={ + CONF_COUNTRY: self.config_entry.data[CONF_COUNTRY] + }, + ) diff --git a/homeassistant/components/holiday/const.py b/homeassistant/components/holiday/const.py index ed283f82412..6a28ae1ffec 100644 --- a/homeassistant/components/holiday/const.py +++ b/homeassistant/components/holiday/const.py @@ -5,3 +5,4 @@ from typing import Final DOMAIN: Final = "holiday" CONF_PROVINCE: Final = "province" +CONF_CATEGORIES: Final = "categories" diff --git a/homeassistant/components/holiday/strings.json b/homeassistant/components/holiday/strings.json index ae4930ecdb4..d464f9e8bfd 100644 --- a/homeassistant/components/holiday/strings.json +++ b/homeassistant/components/holiday/strings.json @@ -2,7 +2,7 @@ "title": "Holiday", "config": { "abort": { - "already_configured": "Already configured. Only a single configuration for country/province combination possible.", + "already_configured": "Already configured. 
Only a single configuration for country/province/categories combination is possible.", "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "step": { @@ -11,16 +11,62 @@ "country": "Country" } }, - "province": { + "options": { "data": { - "province": "Province" + "province": "Province", + "categories": "Categories" + }, + "data_description": { + "province": "Optionally choose a province / subdivision of {country}", + "categories": "Optionally choose additional holiday categories, public holidays are already included" } }, "reconfigure": { "data": { - "province": "[%key:component::holiday::config::step::province::data::province%]" + "province": "[%key:component::holiday::config::step::options::data::province%]", + "categories": "[%key:component::holiday::config::step::options::data::categories%]" + }, + "data_description": { + "province": "[%key:component::holiday::config::step::options::data_description::province%]", + "categories": "[%key:component::holiday::config::step::options::data_description::categories%]" } } } + }, + "options": { + "abort": { + "already_configured": "[%key:component::holiday::config::abort::already_configured%]", + "no_categories": "The country has no additional categories to configure." 
+    },
+    "step": {
+      "init": {
+        "data": {
+          "categories": "[%key:component::holiday::config::step::options::data::categories%]"
+        },
+        "data_description": {
+          "categories": "[%key:component::holiday::config::step::options::data_description::categories%]"
+        }
+      }
+    }
+  },
+  "selector": {
+    "categories": {
+      "options": {
+        "armed_forces": "Armed forces",
+        "bank": "Bank",
+        "catholic": "Catholic",
+        "chinese": "Chinese",
+        "christian": "Christian",
+        "government": "Government",
+        "half_day": "Half day",
+        "hebrew": "Hebrew",
+        "hindu": "Hindu",
+        "islamic": "Islamic",
+        "optional": "Optional",
+        "school": "School",
+        "unofficial": "Unofficial",
+        "workday": "Workday"
+      }
+    }
   }
 }
diff --git a/tests/components/holiday/test_config_flow.py b/tests/components/holiday/test_config_flow.py
index 466dbaffd8b..f561c4a4b9f 100644
--- a/tests/components/holiday/test_config_flow.py
+++ b/tests/components/holiday/test_config_flow.py
@@ -1,19 +1,25 @@
 """Test the Holiday config flow."""
 
+from datetime import datetime
 from unittest.mock import AsyncMock
 
+from freezegun.api import FrozenDateTimeFactory
+from holidays import UNOFFICIAL
 import pytest
 
 from homeassistant import config_entries
-from homeassistant.components.holiday.const import CONF_PROVINCE, DOMAIN
-from homeassistant.const import CONF_COUNTRY
+from homeassistant.components.holiday.const import (
+    CONF_CATEGORIES,
+    CONF_PROVINCE,
+    DOMAIN,
+)
+from homeassistant.const import CONF_COUNTRY, STATE_OFF, STATE_ON
 from homeassistant.core import HomeAssistant
 from homeassistant.data_entry_flow import FlowResultType
+from homeassistant.util import dt as dt_util
 
 from tests.common import MockConfigEntry
 
-pytestmark = pytest.mark.usefixtures("mock_setup_entry")
-
 
 async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None:
     """Test we get the form."""
@@ -49,6 +55,7 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None:
 
     assert len(mock_setup_entry.mock_calls) == 1
+@pytest.mark.usefixtures("mock_setup_entry") async def test_form_no_subdivision(hass: HomeAssistant) -> None: """Test we get the forms correctly without subdivision.""" result = await hass.config_entries.flow.async_init( @@ -71,6 +78,7 @@ async def test_form_no_subdivision(hass: HomeAssistant) -> None: } +@pytest.mark.usefixtures("mock_setup_entry") async def test_form_translated_title(hass: HomeAssistant) -> None: """Test the title gets translated.""" hass.config.language = "de" @@ -90,6 +98,7 @@ async def test_form_translated_title(hass: HomeAssistant) -> None: assert result2["title"] == "Schweden" +@pytest.mark.usefixtures("mock_setup_entry") async def test_single_combination_country_province(hass: HomeAssistant) -> None: """Test that configuring more than one instance is rejected.""" data_de = { @@ -129,6 +138,7 @@ async def test_single_combination_country_province(hass: HomeAssistant) -> None: assert result_de_step2["reason"] == "already_configured" +@pytest.mark.usefixtures("mock_setup_entry") async def test_form_babel_unresolved_language(hass: HomeAssistant) -> None: """Test the config flow if using not babel supported language.""" hass.config.language = "en-XX" @@ -175,6 +185,7 @@ async def test_form_babel_unresolved_language(hass: HomeAssistant) -> None: } +@pytest.mark.usefixtures("mock_setup_entry") async def test_form_babel_replace_dash_with_underscore(hass: HomeAssistant) -> None: """Test the config flow if using language with dash.""" hass.config.language = "en-GB" @@ -221,7 +232,8 @@ async def test_form_babel_replace_dash_with_underscore(hass: HomeAssistant) -> N } -async def test_reconfigure(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: +@pytest.mark.usefixtures("mock_setup_entry") +async def test_reconfigure(hass: HomeAssistant) -> None: """Test reconfigure flow.""" entry = MockConfigEntry( domain=DOMAIN, @@ -248,9 +260,38 @@ async def test_reconfigure(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> assert entry.data == 
{"country": "DE", "province": "NW"}
 
 
-async def test_reconfigure_incorrect_language(
-    hass: HomeAssistant, mock_setup_entry: AsyncMock
-) -> None:
+@pytest.mark.usefixtures("mock_setup_entry")
+async def test_reconfigure_with_categories(hass: HomeAssistant) -> None:
+    """Test reconfigure flow with categories."""
+    entry = MockConfigEntry(
+        domain=DOMAIN,
+        title="United States, TX",
+        data={"country": "US", "province": "TX"},
+    )
+    entry.add_to_hass(hass)
+
+    result = await entry.start_reconfigure_flow(hass)
+    assert result["type"] is FlowResultType.FORM
+
+    result = await hass.config_entries.flow.async_configure(
+        result["flow_id"],
+        {
+            CONF_PROVINCE: "AL",
+            CONF_CATEGORIES: [UNOFFICIAL],
+        },
+    )
+    await hass.async_block_till_done()
+
+    assert result["type"] is FlowResultType.ABORT
+    assert result["reason"] == "reconfigure_successful"
+    entry = hass.config_entries.async_get_entry(entry.entry_id)
+    assert entry.title == "United States, AL"
+    assert entry.data == {CONF_COUNTRY: "US", CONF_PROVINCE: "AL"}
+    assert entry.options == {CONF_CATEGORIES: ["unofficial"]}
+
+
+@pytest.mark.usefixtures("mock_setup_entry")
+async def test_reconfigure_incorrect_language(hass: HomeAssistant) -> None:
     """Test reconfigure flow default to English."""
     hass.config.language = "en-XX"
 
@@ -279,9 +320,8 @@ async def test_reconfigure_incorrect_language(
     assert entry.data == {"country": "DE", "province": "NW"}
 
 
-async def test_reconfigure_entry_exists(
-    hass: HomeAssistant, mock_setup_entry: AsyncMock
-) -> None:
+@pytest.mark.usefixtures("mock_setup_entry")
+async def test_reconfigure_entry_exists(hass: HomeAssistant) -> None:
     """Test reconfigure flow stops if other entry already exist."""
     entry = MockConfigEntry(
         domain=DOMAIN,
@@ -312,3 +352,92 @@ async def test_reconfigure_entry_exists(
     entry = hass.config_entries.async_get_entry(entry.entry_id)
     assert entry.title == "Germany, BW"
     assert entry.data == {"country": "DE", "province": "BW"}
+
+
+async def test_form_with_options(
hass: HomeAssistant, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the flow with configuring options.""" + await hass.config.async_set_time_zone("America/Chicago") + zone = await dt_util.async_get_time_zone("America/Chicago") + # Oct 31st is a Friday. Unofficial holiday as Halloween + freezer.move_to(datetime(2024, 10, 31, 12, 0, 0, tzinfo=zone)) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_COUNTRY: "US", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_PROVINCE: "TX", + CONF_CATEGORIES: [UNOFFICIAL], + }, + ) + await hass.async_block_till_done(wait_background_tasks=True) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "United States, TX" + assert result["data"] == { + CONF_COUNTRY: "US", + CONF_PROVINCE: "TX", + } + assert result["options"] == { + CONF_CATEGORIES: ["unofficial"], + } + + state = hass.states.get("calendar.united_states_tx") + assert state + assert state.state == STATE_ON + + entries = hass.config_entries.async_entries(DOMAIN) + entry = entries[0] + result = await hass.config_entries.options.async_init(entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + {CONF_CATEGORIES: []}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_CATEGORIES: [], + } + + state = hass.states.get("calendar.united_states_tx") + assert state + assert state.state == STATE_OFF + + +@pytest.mark.usefixtures("mock_setup_entry") +async def 
test_options_abort_no_categories(hass: HomeAssistant) -> None:
+    """Test the options flow abort if no categories to select."""
+    config_entry = MockConfigEntry(
+        domain=DOMAIN,
+        data={CONF_COUNTRY: "SE"},
+        title="Sweden",
+    )
+    config_entry.add_to_hass(hass)
+
+    await hass.config_entries.async_setup(config_entry.entry_id)
+    await hass.async_block_till_done()
+
+    result = await hass.config_entries.options.async_init(config_entry.entry_id)
+    assert result["type"] is FlowResultType.ABORT
+    assert result["reason"] == "no_categories"

From a7ba63bf86d75a1f3b7c5907cfa08f087c851edf Mon Sep 17 00:00:00 2001
From: Mick Vleeshouwer
Date: Tue, 17 Dec 2024 20:22:07 +0100
Subject: [PATCH 377/677] Add missing CozyTouch servers to ConfigFlow exception handler in Overkiz (#131696)

---
 homeassistant/components/overkiz/config_flow.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/homeassistant/components/overkiz/config_flow.py b/homeassistant/components/overkiz/config_flow.py
index 471a13d0de2..af7e277d928 100644
--- a/homeassistant/components/overkiz/config_flow.py
+++ b/homeassistant/components/overkiz/config_flow.py
@@ -151,9 +151,11 @@ class OverkizConfigFlow(ConfigFlow, domain=DOMAIN):
         except BadCredentialsException as exception:
             # If authentication with CozyTouch auth server is valid, but token is invalid
             # for Overkiz API server, the hardware is not supported.
- if user_input[CONF_HUB] == Server.ATLANTIC_COZYTOUCH and not isinstance( - exception, CozyTouchBadCredentialsException - ): + if user_input[CONF_HUB] in { + Server.ATLANTIC_COZYTOUCH, + Server.SAUTER_COZYTOUCH, + Server.THERMOR_COZYTOUCH, + } and not isinstance(exception, CozyTouchBadCredentialsException): description_placeholders["unsupported_device"] = "CozyTouch" errors["base"] = "unsupported_hardware" else: From 8bbbbb00d5df3acb0650df86b9b4f7f974d6d1ec Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Tue, 17 Dec 2024 20:43:09 +0100 Subject: [PATCH 378/677] Limit unique_id migration to platform for BMW (#131582) --- .../bmw_connected_drive/__init__.py | 38 +++++++++++-------- .../bmw_connected_drive/test_init.py | 27 ++++++++++++- 2 files changed, 48 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/bmw_connected_drive/__init__.py b/homeassistant/components/bmw_connected_drive/__init__.py index 7b6fb4119db..05fa3e3cab0 100644 --- a/homeassistant/components/bmw_connected_drive/__init__.py +++ b/homeassistant/components/bmw_connected_drive/__init__.py @@ -73,23 +73,29 @@ async def _async_migrate_entries( @callback def update_unique_id(entry: er.RegistryEntry) -> dict[str, str] | None: replacements = { - "charging_level_hv": "fuel_and_battery.remaining_battery_percent", - "fuel_percent": "fuel_and_battery.remaining_fuel_percent", - "ac_current_limit": "charging_profile.ac_current_limit", - "charging_start_time": "fuel_and_battery.charging_start_time", - "charging_end_time": "fuel_and_battery.charging_end_time", - "charging_status": "fuel_and_battery.charging_status", - "charging_target": "fuel_and_battery.charging_target", - "remaining_battery_percent": "fuel_and_battery.remaining_battery_percent", - "remaining_range_total": "fuel_and_battery.remaining_range_total", - "remaining_range_electric": "fuel_and_battery.remaining_range_electric", - "remaining_range_fuel": 
"fuel_and_battery.remaining_range_fuel", - "remaining_fuel": "fuel_and_battery.remaining_fuel", - "remaining_fuel_percent": "fuel_and_battery.remaining_fuel_percent", - "activity": "climate.activity", + Platform.SENSOR.value: { + "charging_level_hv": "fuel_and_battery.remaining_battery_percent", + "fuel_percent": "fuel_and_battery.remaining_fuel_percent", + "ac_current_limit": "charging_profile.ac_current_limit", + "charging_start_time": "fuel_and_battery.charging_start_time", + "charging_end_time": "fuel_and_battery.charging_end_time", + "charging_status": "fuel_and_battery.charging_status", + "charging_target": "fuel_and_battery.charging_target", + "remaining_battery_percent": "fuel_and_battery.remaining_battery_percent", + "remaining_range_total": "fuel_and_battery.remaining_range_total", + "remaining_range_electric": "fuel_and_battery.remaining_range_electric", + "remaining_range_fuel": "fuel_and_battery.remaining_range_fuel", + "remaining_fuel": "fuel_and_battery.remaining_fuel", + "remaining_fuel_percent": "fuel_and_battery.remaining_fuel_percent", + "activity": "climate.activity", + } } - if (key := entry.unique_id.split("-")[-1]) in replacements: - new_unique_id = entry.unique_id.replace(key, replacements[key]) + if (key := entry.unique_id.split("-")[-1]) in replacements.get( + entry.domain, [] + ): + new_unique_id = entry.unique_id.replace( + key, replacements[entry.domain][key] + ) _LOGGER.debug( "Migrating entity '%s' unique_id from '%s' to '%s'", entry.entity_id, diff --git a/tests/components/bmw_connected_drive/test_init.py b/tests/components/bmw_connected_drive/test_init.py index e523b2b3d02..8507cacc376 100644 --- a/tests/components/bmw_connected_drive/test_init.py +++ b/tests/components/bmw_connected_drive/test_init.py @@ -10,7 +10,7 @@ from homeassistant.components.bmw_connected_drive.const import ( CONF_READ_ONLY, DOMAIN as BMW_DOMAIN, ) -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.const import Platform 
from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -18,6 +18,9 @@ from . import FIXTURE_CONFIG_ENTRY from tests.common import MockConfigEntry +BINARY_SENSOR_DOMAIN = Platform.BINARY_SENSOR.value +SENSOR_DOMAIN = Platform.SENSOR.value + VIN = "WBYYYYYYYYYYYYYYY" VEHICLE_NAME = "i3 (+ REX)" VEHICLE_NAME_SLUG = "i3_rex" @@ -109,6 +112,28 @@ async def test_migrate_options_from_data(hass: HomeAssistant) -> None: f"{VIN}-mileage", f"{VIN}-mileage", ), + ( + { + "domain": SENSOR_DOMAIN, + "platform": BMW_DOMAIN, + "unique_id": f"{VIN}-charging_status", + "suggested_object_id": f"{VEHICLE_NAME} Charging Status", + "disabled_by": None, + }, + f"{VIN}-charging_status", + f"{VIN}-fuel_and_battery.charging_status", + ), + ( + { + "domain": BINARY_SENSOR_DOMAIN, + "platform": BMW_DOMAIN, + "unique_id": f"{VIN}-charging_status", + "suggested_object_id": f"{VEHICLE_NAME} Charging Status", + "disabled_by": None, + }, + f"{VIN}-charging_status", + f"{VIN}-charging_status", + ), ], ) async def test_migrate_unique_ids( From 5e5bebd7eb5d9b183697d3402ca0cffb19f7dbaf Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Tue, 17 Dec 2024 20:43:53 +0100 Subject: [PATCH 379/677] Remove unused constants from SABnzbd (#133445) --- homeassistant/components/sabnzbd/const.py | 4 ---- homeassistant/components/sabnzbd/quality_scale.yaml | 5 +---- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/homeassistant/components/sabnzbd/const.py b/homeassistant/components/sabnzbd/const.py index 991490f5716..f05b3f19e98 100644 --- a/homeassistant/components/sabnzbd/const.py +++ b/homeassistant/components/sabnzbd/const.py @@ -1,15 +1,11 @@ """Constants for the Sabnzbd component.""" DOMAIN = "sabnzbd" -DATA_SABNZBD = "sabnzbd" ATTR_SPEED = "speed" ATTR_API_KEY = "api_key" -DEFAULT_HOST = "localhost" -DEFAULT_PORT = 8080 DEFAULT_SPEED_LIMIT = "100" -DEFAULT_SSL = False SERVICE_PAUSE = "pause" SERVICE_RESUME = "resume" 
diff --git a/homeassistant/components/sabnzbd/quality_scale.yaml b/homeassistant/components/sabnzbd/quality_scale.yaml index c3fea2427ce..f5bae1c692b 100644 --- a/homeassistant/components/sabnzbd/quality_scale.yaml +++ b/homeassistant/components/sabnzbd/quality_scale.yaml @@ -6,10 +6,7 @@ rules: Do not remove services when all config entries are removed. appropriate-polling: done brands: done - common-modules: - status: todo - comment: | - const.py has unused variables. + common-modules: done config-flow-test-coverage: done config-flow: done dependency-transparency: done From c9ca1f63eacacc265931411eac0c607b6707455d Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Tue, 17 Dec 2024 20:44:24 +0100 Subject: [PATCH 380/677] Allow only single instance of energyzero integration (#133443) --- .../components/energyzero/manifest.json | 4 +++- homeassistant/generated/integrations.json | 5 +++-- tests/components/energyzero/test_config_flow.py | 17 +++++++++++++++++ 3 files changed, 23 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/energyzero/manifest.json b/homeassistant/components/energyzero/manifest.json index bb867e88d85..b647faebe1d 100644 --- a/homeassistant/components/energyzero/manifest.json +++ b/homeassistant/components/energyzero/manifest.json @@ -4,6 +4,8 @@ "codeowners": ["@klaasnicolaas"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/energyzero", + "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["energyzero==2.1.1"] + "requirements": ["energyzero==2.1.1"], + "single_config_entry": true } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index a94962b458b..7cb7a5a1aef 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -1653,9 +1653,10 @@ }, "energyzero": { "name": "EnergyZero", - "integration_type": "hub", + "integration_type": "service", "config_flow": true, - 
"iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": true }, "enigma2": { "name": "Enigma2 (OpenWebif)", diff --git a/tests/components/energyzero/test_config_flow.py b/tests/components/energyzero/test_config_flow.py index a9fe8534fd5..4c4e831e448 100644 --- a/tests/components/energyzero/test_config_flow.py +++ b/tests/components/energyzero/test_config_flow.py @@ -9,6 +9,8 @@ from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + async def test_full_user_flow( hass: HomeAssistant, @@ -33,3 +35,18 @@ async def test_full_user_flow( assert result2 == snapshot assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_single_instance( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test abort when setting up a duplicate entry.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "single_instance_allowed" From 9c26654db7a0d9de95c33f2da532b9535558b8bc Mon Sep 17 00:00:00 2001 From: Louis Christ Date: Tue, 17 Dec 2024 20:44:38 +0100 Subject: [PATCH 381/677] Use entity services in bluesound integration (#129266) --- .../components/bluesound/__init__.py | 2 - .../components/bluesound/manifest.json | 2 +- .../components/bluesound/media_player.py | 191 +++++++++++------- .../components/bluesound/services.py | 68 ------- homeassistant/components/bluesound/utils.py | 13 ++ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/bluesound/conftest.py | 8 +- .../components/bluesound/test_media_player.py | 18 +- 9 files changed, 142 insertions(+), 164 deletions(-) delete mode 100644 homeassistant/components/bluesound/services.py diff --git 
a/homeassistant/components/bluesound/__init__.py b/homeassistant/components/bluesound/__init__.py index 82fe9b00d57..b3facc0b8ac 100644 --- a/homeassistant/components/bluesound/__init__.py +++ b/homeassistant/components/bluesound/__init__.py @@ -14,7 +14,6 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.typing import ConfigType from .const import DOMAIN -from .services import setup_services CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) @@ -36,7 +35,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Bluesound.""" if DOMAIN not in hass.data: hass.data[DOMAIN] = [] - setup_services(hass) return True diff --git a/homeassistant/components/bluesound/manifest.json b/homeassistant/components/bluesound/manifest.json index 462112a8b78..151c1512b74 100644 --- a/homeassistant/components/bluesound/manifest.json +++ b/homeassistant/components/bluesound/manifest.json @@ -6,7 +6,7 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/bluesound", "iot_class": "local_polling", - "requirements": ["pyblu==1.0.4"], + "requirements": ["pyblu==2.0.0"], "zeroconf": [ { "type": "_musc._tcp.local." 
diff --git a/homeassistant/components/bluesound/media_player.py b/homeassistant/components/bluesound/media_player.py index 38ef78fad3a..4882d543617 100644 --- a/homeassistant/components/bluesound/media_player.py +++ b/homeassistant/components/bluesound/media_player.py @@ -28,18 +28,26 @@ from homeassistant.const import CONF_HOST, CONF_HOSTS, CONF_NAME, CONF_PORT from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import config_validation as cv, issue_registry as ir +from homeassistant.helpers import ( + config_validation as cv, + entity_platform, + issue_registry as ir, +) from homeassistant.helpers.device_registry import ( CONNECTION_NETWORK_MAC, DeviceInfo, format_mac, ) +from homeassistant.helpers.dispatcher import ( + async_dispatcher_connect, + async_dispatcher_send, +) from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN, INTEGRATION_TITLE -from .utils import format_unique_id +from .utils import dispatcher_join_signal, dispatcher_unjoin_signal, format_unique_id if TYPE_CHECKING: from . 
import BluesoundConfigEntry @@ -51,6 +59,11 @@ SCAN_INTERVAL = timedelta(minutes=15) DATA_BLUESOUND = DOMAIN DEFAULT_PORT = 11000 +SERVICE_CLEAR_TIMER = "clear_sleep_timer" +SERVICE_JOIN = "join" +SERVICE_SET_TIMER = "set_sleep_timer" +SERVICE_UNJOIN = "unjoin" + NODE_OFFLINE_CHECK_TIMEOUT = 180 NODE_RETRY_INITIATION = timedelta(minutes=3) @@ -130,6 +143,18 @@ async def async_setup_entry( config_entry.runtime_data.sync_status, ) + platform = entity_platform.async_get_current_platform() + platform.async_register_entity_service( + SERVICE_SET_TIMER, None, "async_increase_timer" + ) + platform.async_register_entity_service( + SERVICE_CLEAR_TIMER, None, "async_clear_timer" + ) + platform.async_register_entity_service( + SERVICE_JOIN, {vol.Required(ATTR_MASTER): cv.entity_id}, "async_join" + ) + platform.async_register_entity_service(SERVICE_UNJOIN, None, "async_unjoin") + hass.data[DATA_BLUESOUND].append(bluesound_player) async_add_entities([bluesound_player], update_before_add=True) @@ -175,13 +200,12 @@ class BluesoundPlayer(MediaPlayerEntity): self._status: Status | None = None self._inputs: list[Input] = [] self._presets: list[Preset] = [] - self._muted = False - self._master: BluesoundPlayer | None = None - self._is_master = False self._group_name: str | None = None self._group_list: list[str] = [] self._bluesound_device_name = sync_status.name self._player = player + self._is_leader = False + self._leader: BluesoundPlayer | None = None self._attr_unique_id = format_unique_id(sync_status.mac, port) # there should always be one player with the default port per mac @@ -250,6 +274,22 @@ class BluesoundPlayer(MediaPlayerEntity): name=f"bluesound.poll_sync_status_loop_{self.host}:{self.port}", ) + assert self._sync_status.id is not None + self.async_on_remove( + async_dispatcher_connect( + self.hass, + dispatcher_join_signal(self.entity_id), + self.async_add_follower, + ) + ) + self.async_on_remove( + async_dispatcher_connect( + self.hass, + 
dispatcher_unjoin_signal(self._sync_status.id), + self.async_remove_follower, + ) + ) + async def async_will_remove_from_hass(self) -> None: """Stop the polling task.""" await super().async_will_remove_from_hass() @@ -317,25 +357,25 @@ class BluesoundPlayer(MediaPlayerEntity): self._group_list = self.rebuild_bluesound_group() - if sync_status.master is not None: - self._is_master = False - master_id = f"{sync_status.master.ip}:{sync_status.master.port}" - master_device = [ + if sync_status.leader is not None: + self._is_leader = False + leader_id = f"{sync_status.leader.ip}:{sync_status.leader.port}" + leader_device = [ device for device in self.hass.data[DATA_BLUESOUND] - if device.id == master_id + if device.id == leader_id ] - if master_device and master_id != self.id: - self._master = master_device[0] + if leader_device and leader_id != self.id: + self._leader = leader_device[0] else: - self._master = None - _LOGGER.error("Master not found %s", master_id) + self._leader = None + _LOGGER.error("Leader not found %s", leader_id) else: - if self._master is not None: - self._master = None - slaves = self._sync_status.slaves - self._is_master = slaves is not None + if self._leader is not None: + self._leader = None + followers = self._sync_status.followers + self._is_leader = followers is not None self.async_write_ha_state() @@ -355,7 +395,7 @@ class BluesoundPlayer(MediaPlayerEntity): if self._status is None: return MediaPlayerState.OFF - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return MediaPlayerState.IDLE match self._status.state: @@ -369,7 +409,7 @@ class BluesoundPlayer(MediaPlayerEntity): @property def media_title(self) -> str | None: """Title of current playing media.""" - if self._status is None or (self.is_grouped and not self.is_master): + if self._status is None or (self.is_grouped and not self.is_leader): return None return self._status.name @@ -380,7 +420,7 @@ class BluesoundPlayer(MediaPlayerEntity): if 
self._status is None: return None - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return self._group_name return self._status.artist @@ -388,7 +428,7 @@ class BluesoundPlayer(MediaPlayerEntity): @property def media_album_name(self) -> str | None: """Artist of current playing media (Music track only).""" - if self._status is None or (self.is_grouped and not self.is_master): + if self._status is None or (self.is_grouped and not self.is_leader): return None return self._status.album @@ -396,7 +436,7 @@ class BluesoundPlayer(MediaPlayerEntity): @property def media_image_url(self) -> str | None: """Image url of current playing media.""" - if self._status is None or (self.is_grouped and not self.is_master): + if self._status is None or (self.is_grouped and not self.is_leader): return None url = self._status.image @@ -411,7 +451,7 @@ class BluesoundPlayer(MediaPlayerEntity): @property def media_position(self) -> int | None: """Position of current playing media in seconds.""" - if self._status is None or (self.is_grouped and not self.is_master): + if self._status is None or (self.is_grouped and not self.is_leader): return None mediastate = self.state @@ -430,7 +470,7 @@ class BluesoundPlayer(MediaPlayerEntity): @property def media_duration(self) -> int | None: """Duration of current playing media in seconds.""" - if self._status is None or (self.is_grouped and not self.is_master): + if self._status is None or (self.is_grouped and not self.is_leader): return None duration = self._status.total_seconds @@ -489,7 +529,7 @@ class BluesoundPlayer(MediaPlayerEntity): @property def source_list(self) -> list[str] | None: """List of available input sources.""" - if self._status is None or (self.is_grouped and not self.is_master): + if self._status is None or (self.is_grouped and not self.is_leader): return None sources = [x.text for x in self._inputs] @@ -500,7 +540,7 @@ class BluesoundPlayer(MediaPlayerEntity): @property def source(self) -> 
str | None: """Name of the current input source.""" - if self._status is None or (self.is_grouped and not self.is_master): + if self._status is None or (self.is_grouped and not self.is_leader): return None if self._status.input_id is not None: @@ -520,7 +560,7 @@ class BluesoundPlayer(MediaPlayerEntity): if self._status is None: return MediaPlayerEntityFeature(0) - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return ( MediaPlayerEntityFeature.VOLUME_STEP | MediaPlayerEntityFeature.VOLUME_SET @@ -560,14 +600,17 @@ class BluesoundPlayer(MediaPlayerEntity): return supported @property - def is_master(self) -> bool: - """Return true if player is a coordinator.""" - return self._is_master + def is_leader(self) -> bool: + """Return true if player is leader of a group.""" + return self._sync_status.followers is not None @property def is_grouped(self) -> bool: - """Return true if player is a coordinator.""" - return self._master is not None or self._is_master + """Return true if player is member or leader of a group.""" + return ( + self._sync_status.followers is not None + or self._sync_status.leader is not None + ) @property def shuffle(self) -> bool: @@ -580,25 +623,25 @@ class BluesoundPlayer(MediaPlayerEntity): async def async_join(self, master: str) -> None: """Join the player to a group.""" - master_device = [ - device - for device in self.hass.data[DATA_BLUESOUND] - if device.entity_id == master - ] + if master == self.entity_id: + raise ServiceValidationError("Cannot join player to itself") - if len(master_device) > 0: - if self.id == master_device[0].id: - raise ServiceValidationError("Cannot join player to itself") + _LOGGER.debug("Trying to join player: %s", self.id) + async_dispatcher_send( + self.hass, dispatcher_join_signal(master), self.host, self.port + ) - _LOGGER.debug( - "Trying to join player: %s to master: %s", - self.id, - master_device[0].id, - ) + async def async_unjoin(self) -> None: + """Unjoin the player 
from a group.""" + if self._sync_status.leader is None: + return - await master_device[0].async_add_slave(self) - else: - _LOGGER.error("Master not found %s", master_device) + leader_id = f"{self._sync_status.leader.ip}:{self._sync_status.leader.port}" + + _LOGGER.debug("Trying to unjoin player: %s", self.id) + async_dispatcher_send( + self.hass, dispatcher_unjoin_signal(leader_id), self.host, self.port + ) @property def extra_state_attributes(self) -> dict[str, Any] | None: @@ -607,31 +650,31 @@ class BluesoundPlayer(MediaPlayerEntity): if self._group_list: attributes = {ATTR_BLUESOUND_GROUP: self._group_list} - attributes[ATTR_MASTER] = self._is_master + attributes[ATTR_MASTER] = self.is_leader return attributes def rebuild_bluesound_group(self) -> list[str]: """Rebuild the list of entities in speaker group.""" - if self.sync_status.master is None and self.sync_status.slaves is None: + if self.sync_status.leader is None and self.sync_status.followers is None: return [] player_entities: list[BluesoundPlayer] = self.hass.data[DATA_BLUESOUND] leader_sync_status: SyncStatus | None = None - if self.sync_status.master is None: + if self.sync_status.leader is None: leader_sync_status = self.sync_status else: - required_id = f"{self.sync_status.master.ip}:{self.sync_status.master.port}" + required_id = f"{self.sync_status.leader.ip}:{self.sync_status.leader.port}" for x in player_entities: if x.sync_status.id == required_id: leader_sync_status = x.sync_status break - if leader_sync_status is None or leader_sync_status.slaves is None: + if leader_sync_status is None or leader_sync_status.followers is None: return [] - follower_ids = [f"{x.ip}:{x.port}" for x in leader_sync_status.slaves] + follower_ids = [f"{x.ip}:{x.port}" for x in leader_sync_status.followers] follower_names = [ x.sync_status.name for x in player_entities @@ -640,21 +683,13 @@ class BluesoundPlayer(MediaPlayerEntity): follower_names.insert(0, leader_sync_status.name) return follower_names - async def 
async_unjoin(self) -> None: - """Unjoin the player from a group.""" - if self._master is None: - return + async def async_add_follower(self, host: str, port: int) -> None: + """Add follower to leader.""" + await self._player.add_follower(host, port) - _LOGGER.debug("Trying to unjoin player: %s", self.id) - await self._master.async_remove_slave(self) - - async def async_add_slave(self, slave_device: BluesoundPlayer) -> None: - """Add slave to master.""" - await self._player.add_slave(slave_device.host, slave_device.port) - - async def async_remove_slave(self, slave_device: BluesoundPlayer) -> None: - """Remove slave to master.""" - await self._player.remove_slave(slave_device.host, slave_device.port) + async def async_remove_follower(self, host: str, port: int) -> None: + """Remove follower to leader.""" + await self._player.remove_follower(host, port) async def async_increase_timer(self) -> int: """Increase sleep time on player.""" @@ -672,7 +707,7 @@ class BluesoundPlayer(MediaPlayerEntity): async def async_select_source(self, source: str) -> None: """Select input source.""" - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return # presets and inputs might have the same name; presets have priority @@ -691,49 +726,49 @@ class BluesoundPlayer(MediaPlayerEntity): async def async_clear_playlist(self) -> None: """Clear players playlist.""" - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return await self._player.clear() async def async_media_next_track(self) -> None: """Send media_next command to media player.""" - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return await self._player.skip() async def async_media_previous_track(self) -> None: """Send media_previous command to media player.""" - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return await self._player.back() async def async_media_play(self) 
-> None: """Send media_play command to media player.""" - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return await self._player.play() async def async_media_pause(self) -> None: """Send media_pause command to media player.""" - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return await self._player.pause() async def async_media_stop(self) -> None: """Send stop command.""" - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return await self._player.stop() async def async_media_seek(self, position: float) -> None: """Send media_seek command to media player.""" - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return await self._player.play(seek=int(position)) @@ -742,7 +777,7 @@ class BluesoundPlayer(MediaPlayerEntity): self, media_type: MediaType | str, media_id: str, **kwargs: Any ) -> None: """Send the play_media command to the media player.""" - if self.is_grouped and not self.is_master: + if self.is_grouped and not self.is_leader: return if media_source.is_media_source_id(media_id): diff --git a/homeassistant/components/bluesound/services.py b/homeassistant/components/bluesound/services.py deleted file mode 100644 index 06a507420f8..00000000000 --- a/homeassistant/components/bluesound/services.py +++ /dev/null @@ -1,68 +0,0 @@ -"""Support for Bluesound devices.""" - -from __future__ import annotations - -from typing import NamedTuple - -import voluptuous as vol - -from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.helpers import config_validation as cv - -from .const import ATTR_MASTER, DOMAIN - -SERVICE_CLEAR_TIMER = "clear_sleep_timer" -SERVICE_JOIN = "join" -SERVICE_SET_TIMER = "set_sleep_timer" -SERVICE_UNJOIN = "unjoin" - -BS_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.entity_ids}) - -BS_JOIN_SCHEMA = 
BS_SCHEMA.extend({vol.Required(ATTR_MASTER): cv.entity_id}) - - -class ServiceMethodDetails(NamedTuple): - """Details for SERVICE_TO_METHOD mapping.""" - - method: str - schema: vol.Schema - - -SERVICE_TO_METHOD = { - SERVICE_JOIN: ServiceMethodDetails(method="async_join", schema=BS_JOIN_SCHEMA), - SERVICE_UNJOIN: ServiceMethodDetails(method="async_unjoin", schema=BS_SCHEMA), - SERVICE_SET_TIMER: ServiceMethodDetails( - method="async_increase_timer", schema=BS_SCHEMA - ), - SERVICE_CLEAR_TIMER: ServiceMethodDetails( - method="async_clear_timer", schema=BS_SCHEMA - ), -} - - -def setup_services(hass: HomeAssistant) -> None: - """Set up services for Bluesound component.""" - - async def async_service_handler(service: ServiceCall) -> None: - """Map services to method of Bluesound devices.""" - if not (method := SERVICE_TO_METHOD.get(service.service)): - return - - params = { - key: value for key, value in service.data.items() if key != ATTR_ENTITY_ID - } - if entity_ids := service.data.get(ATTR_ENTITY_ID): - target_players = [ - player for player in hass.data[DOMAIN] if player.entity_id in entity_ids - ] - else: - target_players = hass.data[DOMAIN] - - for player in target_players: - await getattr(player, method.method)(**params) - - for service, method in SERVICE_TO_METHOD.items(): - hass.services.async_register( - DOMAIN, service, async_service_handler, schema=method.schema - ) diff --git a/homeassistant/components/bluesound/utils.py b/homeassistant/components/bluesound/utils.py index 89a6fd1e787..5df5b32de95 100644 --- a/homeassistant/components/bluesound/utils.py +++ b/homeassistant/components/bluesound/utils.py @@ -6,3 +6,16 @@ from homeassistant.helpers.device_registry import format_mac def format_unique_id(mac: str, port: int) -> str: """Generate a unique ID based on the MAC address and port number.""" return f"{format_mac(mac)}-{port}" + + +def dispatcher_join_signal(entity_id: str) -> str: + """Join an entity ID with a signal.""" + return 
f"bluesound_join_{entity_id}" + + +def dispatcher_unjoin_signal(leader_id: str) -> str: + """Unjoin an entity ID with a signal. + + Id is ip_address:port. This can be obtained from sync_status.id. + """ + return f"bluesound_unjoin_{leader_id}" diff --git a/requirements_all.txt b/requirements_all.txt index 2540a297334..2bcbf0535c6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1803,7 +1803,7 @@ pybbox==0.0.5-alpha pyblackbird==0.6 # homeassistant.components.bluesound -pyblu==1.0.4 +pyblu==2.0.0 # homeassistant.components.neato pybotvac==0.0.25 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index fe528899ad3..9cdb1039503 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1477,7 +1477,7 @@ pybalboa==1.0.2 pyblackbird==0.6 # homeassistant.components.bluesound -pyblu==1.0.4 +pyblu==2.0.0 # homeassistant.components.neato pybotvac==0.0.25 diff --git a/tests/components/bluesound/conftest.py b/tests/components/bluesound/conftest.py index b4ee61dee57..717c9f61850 100644 --- a/tests/components/bluesound/conftest.py +++ b/tests/components/bluesound/conftest.py @@ -81,11 +81,11 @@ class PlayerMockData: volume_db=0.5, volume=50, group=None, - master=None, - slaves=None, + leader=None, + followers=None, zone=None, - zone_master=None, - zone_slave=None, + zone_leader=None, + zone_follower=None, mute_volume_db=None, mute_volume=None, ) diff --git a/tests/components/bluesound/test_media_player.py b/tests/components/bluesound/test_media_player.py index 217225628f2..a43696a0a7f 100644 --- a/tests/components/bluesound/test_media_player.py +++ b/tests/components/bluesound/test_media_player.py @@ -11,7 +11,7 @@ from syrupy.filters import props from homeassistant.components.bluesound import DOMAIN as BLUESOUND_DOMAIN from homeassistant.components.bluesound.const import ATTR_MASTER -from homeassistant.components.bluesound.services import ( +from homeassistant.components.bluesound.media_player import ( SERVICE_CLEAR_TIMER, 
SERVICE_JOIN, SERVICE_SET_TIMER, @@ -259,7 +259,7 @@ async def test_join( blocking=True, ) - player_mocks.player_data_secondary.player.add_slave.assert_called_once_with( + player_mocks.player_data_secondary.player.add_follower.assert_called_once_with( "1.1.1.1", 11000 ) @@ -273,7 +273,7 @@ async def test_unjoin( """Test the unjoin action.""" updated_sync_status = dataclasses.replace( player_mocks.player_data.sync_status_long_polling_mock.get(), - master=PairedPlayer("2.2.2.2", 11000), + leader=PairedPlayer("2.2.2.2", 11000), ) player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) @@ -287,7 +287,7 @@ async def test_unjoin( blocking=True, ) - player_mocks.player_data_secondary.player.remove_slave.assert_called_once_with( + player_mocks.player_data_secondary.player.remove_follower.assert_called_once_with( "1.1.1.1", 11000 ) @@ -297,7 +297,7 @@ async def test_attr_master( setup_config_entry: None, player_mocks: PlayerMocks, ) -> None: - """Test the media player master.""" + """Test the media player leader.""" attr_master = hass.states.get("media_player.player_name1111").attributes[ ATTR_MASTER ] @@ -305,7 +305,7 @@ async def test_attr_master( updated_sync_status = dataclasses.replace( player_mocks.player_data.sync_status_long_polling_mock.get(), - slaves=[PairedPlayer("2.2.2.2", 11000)], + followers=[PairedPlayer("2.2.2.2", 11000)], ) player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) @@ -333,7 +333,7 @@ async def test_attr_bluesound_group( updated_sync_status = dataclasses.replace( player_mocks.player_data.sync_status_long_polling_mock.get(), - slaves=[PairedPlayer("2.2.2.2", 11000)], + followers=[PairedPlayer("2.2.2.2", 11000)], ) player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) @@ -361,7 +361,7 @@ async def test_attr_bluesound_group_for_follower( updated_sync_status = dataclasses.replace( player_mocks.player_data.sync_status_long_polling_mock.get(), - 
slaves=[PairedPlayer("2.2.2.2", 11000)], + followers=[PairedPlayer("2.2.2.2", 11000)], ) player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) @@ -370,7 +370,7 @@ async def test_attr_bluesound_group_for_follower( updated_sync_status = dataclasses.replace( player_mocks.player_data_secondary.sync_status_long_polling_mock.get(), - master=PairedPlayer("1.1.1.1", 11000), + leader=PairedPlayer("1.1.1.1", 11000), ) player_mocks.player_data_secondary.sync_status_long_polling_mock.set( updated_sync_status From 935bf3fb112e609f50ba7cdecf54b9f27820acc8 Mon Sep 17 00:00:00 2001 From: jimmyd-be <34766203+jimmyd-be@users.noreply.github.com> Date: Tue, 17 Dec 2024 20:49:42 +0100 Subject: [PATCH 382/677] Bump renson-endura-delta to 1.7.2 (#129491) --- homeassistant/components/renson/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/renson/manifest.json b/homeassistant/components/renson/manifest.json index fa94207748e..fcc482959f2 100644 --- a/homeassistant/components/renson/manifest.json +++ b/homeassistant/components/renson/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/renson", "iot_class": "local_polling", - "requirements": ["renson-endura-delta==1.7.1"] + "requirements": ["renson-endura-delta==1.7.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 2bcbf0535c6..37504e5ec41 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2566,7 +2566,7 @@ regenmaschine==2024.03.0 renault-api==0.2.8 # homeassistant.components.renson -renson-endura-delta==1.7.1 +renson-endura-delta==1.7.2 # homeassistant.components.reolink reolink-aio==0.11.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 9cdb1039503..55bb0e6ac1f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2063,7 +2063,7 @@ regenmaschine==2024.03.0 
renault-api==0.2.8 # homeassistant.components.renson -renson-endura-delta==1.7.1 +renson-endura-delta==1.7.2 # homeassistant.components.reolink reolink-aio==0.11.5 From b124ebeb1f58b83f7a1ec54398ac13854ed43268 Mon Sep 17 00:00:00 2001 From: benjamin-dcs <78026082+benjamin-dcs@users.noreply.github.com> Date: Tue, 17 Dec 2024 20:54:30 +0100 Subject: [PATCH 383/677] Differentiate File integration entries by prefixing the title with the platform instead (#131016) Differentiate File integration entries by prefixes the title with the platform --- homeassistant/components/file/config_flow.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/file/config_flow.py b/homeassistant/components/file/config_flow.py index 992635d05fd..1c4fdbe5c84 100644 --- a/homeassistant/components/file/config_flow.py +++ b/homeassistant/components/file/config_flow.py @@ -32,7 +32,7 @@ from homeassistant.helpers.selector import ( TextSelectorType, ) -from .const import CONF_TIMESTAMP, DEFAULT_NAME, DOMAIN +from .const import CONF_TIMESTAMP, DOMAIN BOOLEAN_SELECTOR = BooleanSelector(BooleanSelectorConfig()) TEMPLATE_SELECTOR = TemplateSelector(TemplateSelectorConfig()) @@ -105,7 +105,7 @@ class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN): if not await self.validate_file_path(user_input[CONF_FILE_PATH]): errors[CONF_FILE_PATH] = "not_allowed" else: - title = f"{DEFAULT_NAME} [{user_input[CONF_FILE_PATH]}]" + title = f"{platform.capitalize()} [{user_input[CONF_FILE_PATH]}]" data = deepcopy(user_input) options = {} for key, value in user_input.items(): From 5014f305bf06ce11977e4bc8167ee501565fe3e3 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Tue, 17 Dec 2024 20:57:04 +0100 Subject: [PATCH 384/677] Mark docs-removal-instructions for SABnzbd as done (#133446) --- homeassistant/components/sabnzbd/quality_scale.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/sabnzbd/quality_scale.yaml 
b/homeassistant/components/sabnzbd/quality_scale.yaml index f5bae1c692b..5539359d977 100644 --- a/homeassistant/components/sabnzbd/quality_scale.yaml +++ b/homeassistant/components/sabnzbd/quality_scale.yaml @@ -16,7 +16,7 @@ rules: The integration has deprecated the actions, thus the documentation has been removed. docs-high-level-description: done docs-installation-instructions: done - docs-removal-instructions: todo + docs-removal-instructions: done entity-event-setup: status: exempt comment: | From 21c3bf48f93d49703bcdfd73c53136f520b3aca6 Mon Sep 17 00:00:00 2001 From: Klaas Schoute Date: Tue, 17 Dec 2024 21:02:39 +0100 Subject: [PATCH 385/677] Allow only single instance of easyenergy integration (#133447) --- .../components/easyenergy/manifest.json | 4 +++- homeassistant/generated/integrations.json | 5 +++-- tests/components/easyenergy/test_config_flow.py | 17 +++++++++++++++++ 3 files changed, 23 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/easyenergy/manifest.json b/homeassistant/components/easyenergy/manifest.json index 25432196169..5cecb1d49f6 100644 --- a/homeassistant/components/easyenergy/manifest.json +++ b/homeassistant/components/easyenergy/manifest.json @@ -4,6 +4,8 @@ "codeowners": ["@klaasnicolaas"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/easyenergy", + "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["easyenergy==2.1.2"] + "requirements": ["easyenergy==2.1.2"], + "single_config_entry": true } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 7cb7a5a1aef..5fc09fcd70f 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -1441,9 +1441,10 @@ }, "easyenergy": { "name": "easyEnergy", - "integration_type": "hub", + "integration_type": "service", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": 
true }, "ebox": { "name": "EBox", diff --git a/tests/components/easyenergy/test_config_flow.py b/tests/components/easyenergy/test_config_flow.py index da7048793b3..44bc8421126 100644 --- a/tests/components/easyenergy/test_config_flow.py +++ b/tests/components/easyenergy/test_config_flow.py @@ -7,6 +7,8 @@ from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + async def test_full_user_flow( hass: HomeAssistant, @@ -31,3 +33,18 @@ async def test_full_user_flow( assert result2.get("data") == {} assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_single_instance( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test abort when setting up a duplicate entry.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "single_instance_allowed" From eae25023e78a718836c561959fa5b6712a51e2b3 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Tue, 17 Dec 2024 21:27:41 +0100 Subject: [PATCH 386/677] Do not remove services when last config entry is unloaded in SABnzbd (#133449) --- homeassistant/components/sabnzbd/__init__.py | 16 +--------------- .../components/sabnzbd/quality_scale.yaml | 5 +---- 2 files changed, 2 insertions(+), 19 deletions(-) diff --git a/homeassistant/components/sabnzbd/__init__.py b/homeassistant/components/sabnzbd/__init__.py index 2e3d6dd613c..fee459340f3 100644 --- a/homeassistant/components/sabnzbd/__init__.py +++ b/homeassistant/components/sabnzbd/__init__.py @@ -8,7 +8,6 @@ from typing import Any import voluptuous as vol -from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant, ServiceCall, callback from 
homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError @@ -165,17 +164,4 @@ async def async_setup_entry(hass: HomeAssistant, entry: SabnzbdConfigEntry) -> b async def async_unload_entry(hass: HomeAssistant, entry: SabnzbdConfigEntry) -> bool: """Unload a Sabnzbd config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - loaded_entries = [ - entry - for entry in hass.config_entries.async_entries(DOMAIN) - if entry.state == ConfigEntryState.LOADED - ] - if len(loaded_entries) == 1: - # If this is the last loaded instance of Sabnzbd, deregister any services - # defined during integration setup: - for service_name in SERVICES: - hass.services.async_remove(DOMAIN, service_name) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/sabnzbd/quality_scale.yaml b/homeassistant/components/sabnzbd/quality_scale.yaml index 5539359d977..ef4e72b4936 100644 --- a/homeassistant/components/sabnzbd/quality_scale.yaml +++ b/homeassistant/components/sabnzbd/quality_scale.yaml @@ -1,9 +1,6 @@ rules: # Bronze - action-setup: - status: todo - comment: | - Do not remove services when all config entries are removed. 
+ action-setup: done appropriate-polling: done brands: done common-modules: done From f8cd6204ca5d3722b8b60d97fb5fd2355907d008 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 17 Dec 2024 21:30:49 +0100 Subject: [PATCH 387/677] Fix reconfigure in Nord Pool (#133431) --- homeassistant/components/nordpool/config_flow.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/nordpool/config_flow.py b/homeassistant/components/nordpool/config_flow.py index 1d75d825e47..b3b807badad 100644 --- a/homeassistant/components/nordpool/config_flow.py +++ b/homeassistant/components/nordpool/config_flow.py @@ -99,10 +99,10 @@ class NordpoolConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the reconfiguration step.""" + reconfigure_entry = self._get_reconfigure_entry() errors: dict[str, str] = {} if user_input: errors = await test_api(self.hass, user_input) - reconfigure_entry = self._get_reconfigure_entry() if not errors: return self.async_update_reload_and_abort( reconfigure_entry, data_updates=user_input @@ -110,6 +110,8 @@ class NordpoolConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="reconfigure", - data_schema=DATA_SCHEMA, + data_schema=self.add_suggested_values_to_schema( + DATA_SCHEMA, user_input or reconfigure_entry.data + ), errors=errors, ) From 4c60e36f4f34590fd994fc4a2a23da8d61bfc944 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 17 Dec 2024 21:59:20 +0100 Subject: [PATCH 388/677] Add Get price service to Nord Pool (#130185) * Add get_price service to Nord Pool * Tests and fixes * Fixes * Not used fixtures * update qs * Fixes * docstring * Remove selector from strings * Mod service --- homeassistant/components/nordpool/__init__.py | 12 ++ homeassistant/components/nordpool/icons.json | 5 + .../components/nordpool/quality_scale.yaml | 15 +- homeassistant/components/nordpool/services.py | 129 ++++++++++++++ 
.../components/nordpool/services.yaml | 48 +++++ .../components/nordpool/strings.json | 39 ++++ .../nordpool/snapshots/test_services.ambr | 127 ++++++++++++++ tests/components/nordpool/test_services.py | 166 ++++++++++++++++++ 8 files changed, 529 insertions(+), 12 deletions(-) create mode 100644 homeassistant/components/nordpool/services.py create mode 100644 homeassistant/components/nordpool/services.yaml create mode 100644 tests/components/nordpool/snapshots/test_services.ambr create mode 100644 tests/components/nordpool/test_services.py diff --git a/homeassistant/components/nordpool/__init__.py b/homeassistant/components/nordpool/__init__.py index 82db98e2148..83f8edc8a8d 100644 --- a/homeassistant/components/nordpool/__init__.py +++ b/homeassistant/components/nordpool/__init__.py @@ -5,13 +5,25 @@ from __future__ import annotations from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util from .const import DOMAIN, PLATFORMS from .coordinator import NordPoolDataUpdateCoordinator +from .services import async_setup_services type NordPoolConfigEntry = ConfigEntry[NordPoolDataUpdateCoordinator] +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the Nord Pool service.""" + + async_setup_services(hass) + return True + async def async_setup_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) -> bool: """Set up Nord Pool from a config entry.""" diff --git a/homeassistant/components/nordpool/icons.json b/homeassistant/components/nordpool/icons.json index 85434a2d09b..5a1a3df3d92 100644 --- a/homeassistant/components/nordpool/icons.json +++ b/homeassistant/components/nordpool/icons.json @@ -38,5 +38,10 @@ "default": 
"mdi:cash-multiple" } } + }, + "services": { + "get_prices_for_date": { + "service": "mdi:cash-multiple" + } } } diff --git a/homeassistant/components/nordpool/quality_scale.yaml b/homeassistant/components/nordpool/quality_scale.yaml index dada1115715..9c5160d0ccb 100644 --- a/homeassistant/components/nordpool/quality_scale.yaml +++ b/homeassistant/components/nordpool/quality_scale.yaml @@ -14,27 +14,18 @@ rules: comment: | Entities doesn't subscribe to events. dependency-transparency: done - action-setup: - status: exempt - comment: | - This integration does not provide additional actions. + action-setup: done common-modules: done docs-high-level-description: done docs-installation-instructions: done docs-removal-instructions: done - docs-actions: - status: exempt - comment: | - This integration does not provide additional actions. + docs-actions: done brands: done # Silver config-entry-unloading: done log-when-unavailable: done entity-unavailable: done - action-exceptions: - status: exempt - comment: | - No actions. 
+ action-exceptions: done reauthentication-flow: status: exempt comment: | diff --git a/homeassistant/components/nordpool/services.py b/homeassistant/components/nordpool/services.py new file mode 100644 index 00000000000..872bd5b1e6b --- /dev/null +++ b/homeassistant/components/nordpool/services.py @@ -0,0 +1,129 @@ +"""Services for Nord Pool integration.""" + +from __future__ import annotations + +from datetime import date, datetime +import logging +from typing import TYPE_CHECKING + +from pynordpool import ( + AREAS, + Currency, + NordPoolAuthenticationError, + NordPoolEmptyResponseError, + NordPoolError, +) +import voluptuous as vol + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_DATE +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, +) +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.selector import ConfigEntrySelector +from homeassistant.util import dt as dt_util +from homeassistant.util.json import JsonValueType + +if TYPE_CHECKING: + from . 
import NordPoolConfigEntry +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) +ATTR_CONFIG_ENTRY = "config_entry" +ATTR_AREAS = "areas" +ATTR_CURRENCY = "currency" + +SERVICE_GET_PRICES_FOR_DATE = "get_prices_for_date" +SERVICE_GET_PRICES_SCHEMA = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(), + vol.Required(ATTR_DATE): cv.date, + vol.Optional(ATTR_AREAS): vol.All(vol.In(list(AREAS)), cv.ensure_list, [str]), + vol.Optional(ATTR_CURRENCY): vol.All( + cv.string, vol.In([currency.value for currency in Currency]) + ), + } +) + + +def get_config_entry(hass: HomeAssistant, entry_id: str) -> NordPoolConfigEntry: + """Return config entry.""" + if not (entry := hass.config_entries.async_get_entry(entry_id)): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="entry_not_found", + ) + if entry.state is not ConfigEntryState.LOADED: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="entry_not_loaded", + ) + return entry + + +def async_setup_services(hass: HomeAssistant) -> None: + """Set up services for Nord Pool integration.""" + + async def get_prices_for_date(call: ServiceCall) -> ServiceResponse: + """Get price service.""" + entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY]) + asked_date: date = call.data[ATTR_DATE] + client = entry.runtime_data.client + + areas: list[str] = entry.data[ATTR_AREAS] + if _areas := call.data.get(ATTR_AREAS): + areas = _areas + + currency: str = entry.data[ATTR_CURRENCY] + if _currency := call.data.get(ATTR_CURRENCY): + currency = _currency + + areas = [area.upper() for area in areas] + currency = currency.upper() + + try: + price_data = await client.async_get_delivery_period( + datetime.combine(asked_date, dt_util.utcnow().time()), + Currency(currency), + areas, + ) + except NordPoolAuthenticationError as error: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="authentication_error", + ) from error + 
except NordPoolEmptyResponseError as error: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="empty_response", + ) from error + except NordPoolError as error: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="connection_error", + ) from error + + result: dict[str, JsonValueType] = {} + for area in areas: + result[area] = [ + { + "start": price_entry.start.isoformat(), + "end": price_entry.end.isoformat(), + "price": price_entry.entry[area], + } + for price_entry in price_data.entries + ] + return result + + hass.services.async_register( + DOMAIN, + SERVICE_GET_PRICES_FOR_DATE, + get_prices_for_date, + schema=SERVICE_GET_PRICES_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) diff --git a/homeassistant/components/nordpool/services.yaml b/homeassistant/components/nordpool/services.yaml new file mode 100644 index 00000000000..dded8482c6f --- /dev/null +++ b/homeassistant/components/nordpool/services.yaml @@ -0,0 +1,48 @@ +get_prices_for_date: + fields: + config_entry: + required: true + selector: + config_entry: + integration: nordpool + date: + required: true + selector: + date: + areas: + selector: + select: + options: + - "EE" + - "LT" + - "LV" + - "AT" + - "BE" + - "FR" + - "GER" + - "NL" + - "PL" + - "DK1" + - "DK2" + - "FI" + - "NO1" + - "NO2" + - "NO3" + - "NO4" + - "NO5" + - "SE1" + - "SE2" + - "SE3" + - "SE4" + - "SYS" + mode: dropdown + currency: + selector: + select: + options: + - "DKK" + - "EUR" + - "NOK" + - "PLN" + - "SEK" + mode: dropdown diff --git a/homeassistant/components/nordpool/strings.json b/homeassistant/components/nordpool/strings.json index 96c22633c9e..d30898730b9 100644 --- a/homeassistant/components/nordpool/strings.json +++ b/homeassistant/components/nordpool/strings.json @@ -70,9 +70,48 @@ } } }, + "services": { + "get_prices_for_date": { + "name": "Get prices for date", + "description": "Retrieve the prices for a specific date.", + "fields": { + "config_entry": { + 
"name": "Select Nord Pool configuration entry", + "description": "Choose the configuration entry." + }, + "date": { + "name": "Date", + "description": "Only dates two months in the past and one day in the future is allowed." + }, + "areas": { + "name": "Areas", + "description": "One or multiple areas to get prices for. If left empty it will use the areas already configured." + }, + "currency": { + "name": "Currency", + "description": "Currency to get prices in. If left empty it will use the currency already configured." + } + } + } + }, "exceptions": { "initial_update_failed": { "message": "Initial update failed on startup with error {error}" + }, + "entry_not_found": { + "message": "The Nord Pool integration is not configured in Home Assistant." + }, + "entry_not_loaded": { + "message": "The Nord Pool integration is currently not loaded or disabled in Home Assistant." + }, + "authentication_error": { + "message": "There was an authentication error as you tried to retrieve data too far in the past." + }, + "empty_response": { + "message": "Nord Pool has not posted market prices for the provided date." + }, + "connection_error": { + "message": "There was a connection error connecting to the API. Try again later." 
} } } diff --git a/tests/components/nordpool/snapshots/test_services.ambr b/tests/components/nordpool/snapshots/test_services.ambr new file mode 100644 index 00000000000..6a57d7ecce9 --- /dev/null +++ b/tests/components/nordpool/snapshots/test_services.ambr @@ -0,0 +1,127 @@ +# serializer version: 1 +# name: test_service_call + dict({ + 'SE3': list([ + dict({ + 'end': '2024-11-05T00:00:00+00:00', + 'price': 250.73, + 'start': '2024-11-04T23:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T01:00:00+00:00', + 'price': 76.36, + 'start': '2024-11-05T00:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T02:00:00+00:00', + 'price': 73.92, + 'start': '2024-11-05T01:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T03:00:00+00:00', + 'price': 61.69, + 'start': '2024-11-05T02:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T04:00:00+00:00', + 'price': 64.6, + 'start': '2024-11-05T03:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T05:00:00+00:00', + 'price': 453.27, + 'start': '2024-11-05T04:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T06:00:00+00:00', + 'price': 996.28, + 'start': '2024-11-05T05:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T07:00:00+00:00', + 'price': 1406.14, + 'start': '2024-11-05T06:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T08:00:00+00:00', + 'price': 1346.54, + 'start': '2024-11-05T07:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T09:00:00+00:00', + 'price': 1150.28, + 'start': '2024-11-05T08:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T10:00:00+00:00', + 'price': 1031.32, + 'start': '2024-11-05T09:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T11:00:00+00:00', + 'price': 927.37, + 'start': '2024-11-05T10:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T12:00:00+00:00', + 'price': 925.05, + 'start': '2024-11-05T11:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T13:00:00+00:00', + 'price': 949.49, + 'start': '2024-11-05T12:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T14:00:00+00:00', + 'price': 1042.03, + 'start': 
'2024-11-05T13:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T15:00:00+00:00', + 'price': 1258.89, + 'start': '2024-11-05T14:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T16:00:00+00:00', + 'price': 1816.45, + 'start': '2024-11-05T15:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T17:00:00+00:00', + 'price': 2512.65, + 'start': '2024-11-05T16:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T18:00:00+00:00', + 'price': 1819.83, + 'start': '2024-11-05T17:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T19:00:00+00:00', + 'price': 1011.77, + 'start': '2024-11-05T18:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T20:00:00+00:00', + 'price': 835.53, + 'start': '2024-11-05T19:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T21:00:00+00:00', + 'price': 796.19, + 'start': '2024-11-05T20:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T22:00:00+00:00', + 'price': 522.3, + 'start': '2024-11-05T21:00:00+00:00', + }), + dict({ + 'end': '2024-11-05T23:00:00+00:00', + 'price': 289.14, + 'start': '2024-11-05T22:00:00+00:00', + }), + ]), + }) +# --- diff --git a/tests/components/nordpool/test_services.py b/tests/components/nordpool/test_services.py new file mode 100644 index 00000000000..224b4bc9981 --- /dev/null +++ b/tests/components/nordpool/test_services.py @@ -0,0 +1,166 @@ +"""Test services in Nord Pool.""" + +from unittest.mock import patch + +from pynordpool import ( + DeliveryPeriodData, + NordPoolAuthenticationError, + NordPoolEmptyResponseError, + NordPoolError, +) +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.nordpool.const import DOMAIN +from homeassistant.components.nordpool.services import ( + ATTR_AREAS, + ATTR_CONFIG_ENTRY, + ATTR_CURRENCY, + SERVICE_GET_PRICES_FOR_DATE, +) +from homeassistant.const import ATTR_DATE +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError + +from tests.common import MockConfigEntry + +TEST_SERVICE_DATA = { + 
ATTR_CONFIG_ENTRY: "to_replace", + ATTR_DATE: "2024-11-05", + ATTR_AREAS: "SE3", + ATTR_CURRENCY: "SEK", +} +TEST_SERVICE_DATA_USE_DEFAULTS = { + ATTR_CONFIG_ENTRY: "to_replace", + ATTR_DATE: "2024-11-05", +} + + +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +async def test_service_call( + hass: HomeAssistant, + load_int: MockConfigEntry, + get_data: DeliveryPeriodData, + snapshot: SnapshotAssertion, +) -> None: + """Test get_prices_for_date service call.""" + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ), + ): + service_data = TEST_SERVICE_DATA.copy() + service_data[ATTR_CONFIG_ENTRY] = load_int.entry_id + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_PRICES_FOR_DATE, + service_data, + blocking=True, + return_response=True, + ) + + assert response == snapshot + price_value = response["SE3"][0]["price"] + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ), + ): + service_data = TEST_SERVICE_DATA_USE_DEFAULTS.copy() + service_data[ATTR_CONFIG_ENTRY] = load_int.entry_id + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_PRICES_FOR_DATE, + service_data, + blocking=True, + return_response=True, + ) + + assert "SE3" in response + assert response["SE3"][0]["price"] == price_value + + +@pytest.mark.parametrize( + ("error", "key"), + [ + (NordPoolAuthenticationError, "authentication_error"), + (NordPoolEmptyResponseError, "empty_response"), + (NordPoolError, "connection_error"), + ], +) +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +async def test_service_call_failures( + hass: HomeAssistant, + load_int: MockConfigEntry, + error: Exception, + key: str, +) -> None: + """Test get_prices_for_date service call when it fails.""" + service_data = TEST_SERVICE_DATA.copy() + service_data[ATTR_CONFIG_ENTRY] = load_int.entry_id + + with ( + patch( + 
"homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + side_effect=error, + ), + pytest.raises(ServiceValidationError) as err, + ): + await hass.services.async_call( + DOMAIN, + SERVICE_GET_PRICES_FOR_DATE, + service_data, + blocking=True, + return_response=True, + ) + assert err.value.translation_key == key + + +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +async def test_service_call_config_entry_bad_state( + hass: HomeAssistant, + load_int: MockConfigEntry, + get_data: DeliveryPeriodData, +) -> None: + """Test get_prices_for_date service call when config entry bad state.""" + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ), + pytest.raises(ServiceValidationError) as err, + ): + await hass.services.async_call( + DOMAIN, + SERVICE_GET_PRICES_FOR_DATE, + TEST_SERVICE_DATA, + blocking=True, + return_response=True, + ) + assert err.value.translation_key == "entry_not_found" + + service_data = TEST_SERVICE_DATA.copy() + service_data[ATTR_CONFIG_ENTRY] = load_int.entry_id + await hass.config_entries.async_unload(load_int.entry_id) + await hass.async_block_till_done() + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ), + pytest.raises(ServiceValidationError) as err, + ): + await hass.services.async_call( + DOMAIN, + SERVICE_GET_PRICES_FOR_DATE, + service_data, + blocking=True, + return_response=True, + ) + assert err.value.translation_key == "entry_not_loaded" From e73512e11c9938be4c274f2e824cda7d9fac1306 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Tue, 17 Dec 2024 22:49:04 +0000 Subject: [PATCH 389/677] Add integration_type to Idasen Desk (#132486) * Add Idasen Desk quality scale record * Update wrong checks * Add integration_type to Idasen Desk --- homeassistant/components/idasen_desk/manifest.json | 1 + 
homeassistant/components/idasen_desk/quality_scale.yaml | 5 ++++- homeassistant/generated/integrations.json | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/idasen_desk/manifest.json b/homeassistant/components/idasen_desk/manifest.json index 0f8c9eaafc9..2f53ec20e11 100644 --- a/homeassistant/components/idasen_desk/manifest.json +++ b/homeassistant/components/idasen_desk/manifest.json @@ -10,6 +10,7 @@ "config_flow": true, "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/idasen_desk", + "integration_type": "device", "iot_class": "local_push", "requirements": ["idasen-ha==2.6.2"] } diff --git a/homeassistant/components/idasen_desk/quality_scale.yaml b/homeassistant/components/idasen_desk/quality_scale.yaml index 28381f98a3e..1b9ec8cd810 100644 --- a/homeassistant/components/idasen_desk/quality_scale.yaml +++ b/homeassistant/components/idasen_desk/quality_scale.yaml @@ -79,7 +79,10 @@ rules: docs-supported-functions: todo docs-troubleshooting: todo docs-use-cases: todo - dynamic-devices: todo + dynamic-devices: + status: exempt + comment: | + This integration has one device per config entry. entity-category: done entity-device-class: done entity-disabled-by-default: done diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 5fc09fcd70f..bd3c9eb04f9 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -2818,7 +2818,7 @@ "name": "IKEA TR\u00c5DFRI" }, "idasen_desk": { - "integration_type": "hub", + "integration_type": "device", "config_flow": true, "iot_class": "local_push", "name": "IKEA Idasen Desk" From 9bff9c5e7bffc2239c9cf4d91d366214e8b42d92 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 17 Dec 2024 18:57:43 -0700 Subject: [PATCH 390/677] Ensure screenlogic retries if the protocol adapter is still booting (#133444) * Ensure screenlogic retries if the protocol adapter is still booting If the protocol adapter is still booting, it will disconnect and never retry ``` Traceback (most recent call last): File "/usr/src/homeassistant/homeassistant/config_entries.py", line 640, in __async_setup_with_context result = await component.async_setup_entry(hass, self) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/src/homeassistant/homeassistant/components/screenlogic/__init__.py", line 65, in async_setup_entry await gateway.async_connect(**connect_info) File "/usr/local/lib/python3.13/site-packages/screenlogicpy/gateway.py", line 142, in async_connect connectPkg = await async_connect_to_gateway( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ...<4 lines>... ) ^ File "/usr/local/lib/python3.13/site-packages/screenlogicpy/requests/login.py", line 107, in async_connect_to_gateway mac_address = await async_gateway_connect(transport, protocol, max_retries) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.13/site-packages/screenlogicpy/requests/login.py", line 77, in async_gateway_connect raise ScreenLogicConnectionError("Host unexpectedly disconnected.") screenlogicpy.const.common.ScreenLogicConnectionError: Host unexpectedly disconnected. 
``` * coverage --- .../components/screenlogic/__init__.py | 3 +- tests/components/screenlogic/test_init.py | 36 ++++++++++++++++++- 2 files changed, 37 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/screenlogic/__init__.py b/homeassistant/components/screenlogic/__init__.py index 6f58e9b3666..972837f7d75 100644 --- a/homeassistant/components/screenlogic/__init__.py +++ b/homeassistant/components/screenlogic/__init__.py @@ -4,6 +4,7 @@ import logging from typing import Any from screenlogicpy import ScreenLogicError, ScreenLogicGateway +from screenlogicpy.const.common import ScreenLogicConnectionError from screenlogicpy.const.data import SHARED_VALUES from homeassistant.config_entries import ConfigEntry @@ -64,7 +65,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ScreenLogicConfigEntry) try: await gateway.async_connect(**connect_info) await gateway.async_update() - except ScreenLogicError as ex: + except (ScreenLogicConnectionError, ScreenLogicError) as ex: raise ConfigEntryNotReady(ex.msg) from ex coordinator = ScreenlogicDataUpdateCoordinator( diff --git a/tests/components/screenlogic/test_init.py b/tests/components/screenlogic/test_init.py index 6416c93f779..f21a1118b4f 100644 --- a/tests/components/screenlogic/test_init.py +++ b/tests/components/screenlogic/test_init.py @@ -4,12 +4,14 @@ from dataclasses import dataclass from unittest.mock import DEFAULT, patch import pytest -from screenlogicpy import ScreenLogicGateway +from screenlogicpy import ScreenLogicError, ScreenLogicGateway +from screenlogicpy.const.common import ScreenLogicConnectionError from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN from homeassistant.components.screenlogic import DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from 
homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util import slugify @@ -284,3 +286,35 @@ async def test_platform_setup( for entity_id in tested_entity_ids: assert hass.states.get(entity_id) is not None + + +@pytest.mark.parametrize( + "exception", + [ScreenLogicConnectionError, ScreenLogicError], +) +async def test_retry_on_connect_exception( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, exception: Exception +) -> None: + """Test setup retries on expected exceptions.""" + + def stub_connect(*args, **kwargs): + raise exception + + mock_config_entry.add_to_hass(hass) + + with ( + patch( + GATEWAY_DISCOVERY_IMPORT_PATH, + return_value={}, + ), + patch.multiple( + ScreenLogicGateway, + async_connect=stub_connect, + is_connected=False, + _async_connected_request=DEFAULT, + ), + ): + assert not await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY From dfdd83789ad858e60faa066aef7d6711cfbf2a9e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 08:05:39 +0100 Subject: [PATCH 391/677] Bump actions/upload-artifact from 4.4.3 to 4.5.0 (#133461) --- .github/workflows/builder.yml | 2 +- .github/workflows/ci.yaml | 22 +++++++++++----------- .github/workflows/wheels.yml | 6 +++--- 3 files changed, 15 insertions(+), 15 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index 8f419cca1da..20b1bd4c718 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -69,7 +69,7 @@ jobs: run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T - - name: Upload translations - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: translations path: translations.tar.gz diff --git a/.github/workflows/ci.yaml 
b/.github/workflows/ci.yaml index 9d6f207382d..71924afecc8 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -537,7 +537,7 @@ jobs: python --version uv pip freeze >> pip_freeze.txt - name: Upload pip_freeze artifact - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: pip-freeze-${{ matrix.python-version }} path: pip_freeze.txt @@ -661,7 +661,7 @@ jobs: . venv/bin/activate python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json - name: Upload licenses - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: licenses-${{ github.run_number }}-${{ matrix.python-version }} path: licenses-${{ matrix.python-version }}.json @@ -877,7 +877,7 @@ jobs: . venv/bin/activate python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests - name: Upload pytest_buckets - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: pytest_buckets path: pytest_buckets.txt @@ -979,14 +979,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-full.conclusion == 'failure' - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml @@ -1106,7 +1106,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: pytest-${{ 
github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1114,7 +1114,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1236,7 +1236,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1244,7 +1244,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1378,14 +1378,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index a36b3073aab..9ea9a557105 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -79,7 +79,7 @@ jobs: ) > .env_file - name: Upload env_file - uses: 
actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: env_file path: ./.env_file @@ -87,7 +87,7 @@ jobs: overwrite: true - name: Upload requirements_diff - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: requirements_diff path: ./requirements_diff.txt @@ -99,7 +99,7 @@ jobs: python -m script.gen_requirements_all ci - name: Upload requirements_all_wheels - uses: actions/upload-artifact@v4.4.3 + uses: actions/upload-artifact@v4.5.0 with: name: requirements_all_wheels path: ./requirements_all_wheels_*.txt From c10473844fc63374b6e4cb55fc173e33ab113093 Mon Sep 17 00:00:00 2001 From: Assaf Inbal Date: Wed, 18 Dec 2024 09:36:42 +0200 Subject: [PATCH 392/677] Add sensors to Ituran integration (#133359) Add sensors to Ituran --- homeassistant/components/ituran/__init__.py | 1 + homeassistant/components/ituran/icons.json | 11 + .../components/ituran/quality_scale.yaml | 5 +- homeassistant/components/ituran/sensor.py | 119 +++++++ homeassistant/components/ituran/strings.json | 19 ++ tests/components/ituran/conftest.py | 6 +- .../ituran/snapshots/test_sensor.ambr | 297 ++++++++++++++++++ .../components/ituran/test_device_tracker.py | 7 +- tests/components/ituran/test_sensor.py | 76 +++++ 9 files changed, 533 insertions(+), 8 deletions(-) create mode 100644 homeassistant/components/ituran/sensor.py create mode 100644 tests/components/ituran/snapshots/test_sensor.ambr create mode 100644 tests/components/ituran/test_sensor.py diff --git a/homeassistant/components/ituran/__init__.py b/homeassistant/components/ituran/__init__.py index b0a26cf7db2..bf9cff238cd 100644 --- a/homeassistant/components/ituran/__init__.py +++ b/homeassistant/components/ituran/__init__.py @@ -9,6 +9,7 @@ from .coordinator import IturanConfigEntry, IturanDataUpdateCoordinator PLATFORMS: list[Platform] = [ Platform.DEVICE_TRACKER, + Platform.SENSOR, ] diff --git a/homeassistant/components/ituran/icons.json 
b/homeassistant/components/ituran/icons.json index a20ea5b7304..bd9182f1569 100644 --- a/homeassistant/components/ituran/icons.json +++ b/homeassistant/components/ituran/icons.json @@ -4,6 +4,17 @@ "car": { "default": "mdi:car" } + }, + "sensor": { + "address": { + "default": "mdi:map-marker" + }, + "battery_voltage": { + "default": "mdi:car-battery" + }, + "heading": { + "default": "mdi:compass" + } } } } diff --git a/homeassistant/components/ituran/quality_scale.yaml b/homeassistant/components/ituran/quality_scale.yaml index 71d0d9698da..cd7e17c3b12 100644 --- a/homeassistant/components/ituran/quality_scale.yaml +++ b/homeassistant/components/ituran/quality_scale.yaml @@ -55,10 +55,7 @@ rules: Only device_tracker platform. devices: done entity-category: todo - entity-disabled-by-default: - status: exempt - comment: | - No noisy entities + entity-disabled-by-default: done discovery: status: exempt comment: | diff --git a/homeassistant/components/ituran/sensor.py b/homeassistant/components/ituran/sensor.py new file mode 100644 index 00000000000..e962f5bd561 --- /dev/null +++ b/homeassistant/components/ituran/sensor.py @@ -0,0 +1,119 @@ +"""Sensors for Ituran vehicles.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime + +from pyituran import Vehicle + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, +) +from homeassistant.const import ( + DEGREE, + UnitOfElectricPotential, + UnitOfLength, + UnitOfSpeed, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . 
import IturanConfigEntry +from .coordinator import IturanDataUpdateCoordinator +from .entity import IturanBaseEntity + + +@dataclass(frozen=True, kw_only=True) +class IturanSensorEntityDescription(SensorEntityDescription): + """Describes Ituran sensor entity.""" + + value_fn: Callable[[Vehicle], StateType | datetime] + + +SENSOR_TYPES: list[IturanSensorEntityDescription] = [ + IturanSensorEntityDescription( + key="address", + translation_key="address", + entity_registry_enabled_default=False, + value_fn=lambda vehicle: vehicle.address, + ), + IturanSensorEntityDescription( + key="battery_voltage", + translation_key="battery_voltage", + device_class=SensorDeviceClass.VOLTAGE, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + suggested_display_precision=0, + entity_registry_enabled_default=False, + value_fn=lambda vehicle: vehicle.battery_voltage, + ), + IturanSensorEntityDescription( + key="heading", + translation_key="heading", + native_unit_of_measurement=DEGREE, + suggested_display_precision=0, + entity_registry_enabled_default=False, + value_fn=lambda vehicle: vehicle.heading, + ), + IturanSensorEntityDescription( + key="last_update_from_vehicle", + translation_key="last_update_from_vehicle", + device_class=SensorDeviceClass.TIMESTAMP, + entity_registry_enabled_default=False, + value_fn=lambda vehicle: vehicle.last_update, + ), + IturanSensorEntityDescription( + key="mileage", + translation_key="mileage", + device_class=SensorDeviceClass.DISTANCE, + native_unit_of_measurement=UnitOfLength.KILOMETERS, + suggested_display_precision=2, + value_fn=lambda vehicle: vehicle.mileage, + ), + IturanSensorEntityDescription( + key="speed", + device_class=SensorDeviceClass.SPEED, + native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR, + suggested_display_precision=0, + value_fn=lambda vehicle: vehicle.speed, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: IturanConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> 
None: + """Set up the Ituran sensors from config entry.""" + coordinator = config_entry.runtime_data + async_add_entities( + IturanSensor(coordinator, license_plate, description) + for description in SENSOR_TYPES + for license_plate in coordinator.data + ) + + +class IturanSensor(IturanBaseEntity, SensorEntity): + """Ituran device tracker.""" + + entity_description: IturanSensorEntityDescription + + def __init__( + self, + coordinator: IturanDataUpdateCoordinator, + license_plate: str, + description: IturanSensorEntityDescription, + ) -> None: + """Initialize the sensor.""" + super().__init__(coordinator, license_plate, description.key) + self.entity_description = description + + @property + def native_value(self) -> StateType | datetime: + """Return the state of the device.""" + return self.entity_description.value_fn(self.vehicle) diff --git a/homeassistant/components/ituran/strings.json b/homeassistant/components/ituran/strings.json index 212dbd1b86a..efc60ef454b 100644 --- a/homeassistant/components/ituran/strings.json +++ b/homeassistant/components/ituran/strings.json @@ -35,6 +35,25 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" } }, + "entity": { + "sensor": { + "address": { + "name": "Address" + }, + "battery_voltage": { + "name": "Battery voltage" + }, + "heading": { + "name": "Heading" + }, + "last_update_from_vehicle": { + "name": "Last update from vehicle" + }, + "mileage": { + "name": "Mileage" + } + } + }, "exceptions": { "api_error": { "message": "An error occurred while communicating with the Ituran service." 
diff --git a/tests/components/ituran/conftest.py b/tests/components/ituran/conftest.py index ef22c90591d..5093cc301a1 100644 --- a/tests/components/ituran/conftest.py +++ b/tests/components/ituran/conftest.py @@ -3,6 +3,7 @@ from collections.abc import Generator from datetime import datetime from unittest.mock import AsyncMock, PropertyMock, patch +from zoneinfo import ZoneInfo import pytest @@ -56,7 +57,10 @@ class MockVehicle: self.gps_coordinates = (25.0, -71.0) self.address = "Bermuda Triangle" self.heading = 150 - self.last_update = datetime(2024, 1, 1, 0, 0, 0) + self.last_update = datetime( + 2024, 1, 1, 0, 0, 0, tzinfo=ZoneInfo("Asia/Jerusalem") + ) + self.battery_voltage = 12.0 @pytest.fixture diff --git a/tests/components/ituran/snapshots/test_sensor.ambr b/tests/components/ituran/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..c1512de912f --- /dev/null +++ b/tests/components/ituran/snapshots/test_sensor.ambr @@ -0,0 +1,297 @@ +# serializer version: 1 +# name: test_sensor[sensor.mock_model_address-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_model_address', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Address', + 'platform': 'ituran', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'address', + 'unique_id': '12345678-address', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.mock_model_address-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'mock model Address', + }), + 'context': , + 'entity_id': 'sensor.mock_model_address', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'Bermuda Triangle', + }) +# --- +# name: test_sensor[sensor.mock_model_battery_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_model_battery_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery voltage', + 'platform': 'ituran', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_voltage', + 'unique_id': '12345678-battery_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.mock_model_battery_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'mock model Battery voltage', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_model_battery_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12.0', + }) +# --- +# name: test_sensor[sensor.mock_model_heading-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_model_heading', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heading', + 'platform': 'ituran', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'heading', + 'unique_id': '12345678-heading', + 'unit_of_measurement': '°', + }) +# --- +# name: test_sensor[sensor.mock_model_heading-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'mock model Heading', + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.mock_model_heading', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '150', + }) +# --- +# name: test_sensor[sensor.mock_model_last_update_from_vehicle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_model_last_update_from_vehicle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last update from vehicle', + 'platform': 'ituran', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_update_from_vehicle', + 'unique_id': '12345678-last_update_from_vehicle', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.mock_model_last_update_from_vehicle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'mock model Last update from vehicle', + }), + 'context': , + 'entity_id': 'sensor.mock_model_last_update_from_vehicle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2023-12-31T22:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.mock_model_mileage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.mock_model_mileage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Mileage', + 'platform': 'ituran', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mileage', + 'unique_id': '12345678-mileage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.mock_model_mileage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'mock model Mileage', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_model_mileage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1000', + }) +# --- +# name: test_sensor[sensor.mock_model_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_model_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Speed', + 'platform': 'ituran', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345678-speed', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.mock_model_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speed', + 'friendly_name': 'mock model Speed', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_model_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- 
diff --git a/tests/components/ituran/test_device_tracker.py b/tests/components/ituran/test_device_tracker.py index 7bcb314cde7..4fe92154e91 100644 --- a/tests/components/ituran/test_device_tracker.py +++ b/tests/components/ituran/test_device_tracker.py @@ -1,13 +1,13 @@ """Test the Ituran device_tracker.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory from pyituran.exceptions import IturanApiError from syrupy.assertion import SnapshotAssertion from homeassistant.components.ituran.const import UPDATE_INTERVAL -from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -24,7 +24,8 @@ async def test_device_tracker( mock_config_entry: MockConfigEntry, ) -> None: """Test state of device_tracker.""" - await setup_integration(hass, mock_config_entry) + with patch("homeassistant.components.ituran.PLATFORMS", [Platform.DEVICE_TRACKER]): + await setup_integration(hass, mock_config_entry) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/ituran/test_sensor.py b/tests/components/ituran/test_sensor.py new file mode 100644 index 00000000000..a057f59b81f --- /dev/null +++ b/tests/components/ituran/test_sensor.py @@ -0,0 +1,76 @@ +"""Test the Ituran device_tracker.""" + +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from pyituran.exceptions import IturanApiError +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.ituran.const import UPDATE_INTERVAL +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_ituran: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test state of sensor.""" + with patch("homeassistant.components.ituran.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_availability( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_ituran: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test sensor is marked as unavailable when we can't reach the Ituran service.""" + entities = [ + "sensor.mock_model_address", + "sensor.mock_model_battery_voltage", + "sensor.mock_model_heading", + "sensor.mock_model_last_update_from_vehicle", + "sensor.mock_model_mileage", + "sensor.mock_model_speed", + ] + + await setup_integration(hass, mock_config_entry) + + for entity_id in entities: + state = hass.states.get(entity_id) + assert state + assert state.state != STATE_UNAVAILABLE + + mock_ituran.get_vehicles.side_effect = IturanApiError + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + for entity_id in entities: + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_UNAVAILABLE + + mock_ituran.get_vehicles.side_effect = None + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + for entity_id in entities: + state = hass.states.get(entity_id) + assert state + assert state.state != STATE_UNAVAILABLE From fab92d1cf84a5aa99766d443d2ad9be7baad0082 Mon Sep 17 00:00:00 2001 From: Noah Husby 
<32528627+noahhusby@users.noreply.github.com> Date: Wed, 18 Dec 2024 02:40:27 -0500 Subject: [PATCH 393/677] Add reconfigure flow to Russound RIO (#133091) * Add reconfigure flow to Russound RIO * Mark reconfiguration flow as done * Update * Update --- .../components/russound_rio/config_flow.py | 23 ++++++- .../russound_rio/quality_scale.yaml | 7 +- .../components/russound_rio/strings.json | 19 +++++- tests/components/russound_rio/conftest.py | 7 +- tests/components/russound_rio/const.py | 12 ++-- .../russound_rio/snapshots/test_init.ambr | 2 +- .../russound_rio/test_config_flow.py | 65 ++++++++++++++++++- tests/components/russound_rio/test_init.py | 4 +- 8 files changed, 121 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/russound_rio/config_flow.py b/homeassistant/components/russound_rio/config_flow.py index 15d002b3f49..e5efd309a23 100644 --- a/homeassistant/components/russound_rio/config_flow.py +++ b/homeassistant/components/russound_rio/config_flow.py @@ -9,7 +9,11 @@ from typing import Any from aiorussound import RussoundClient, RussoundTcpConnectionHandler import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, + ConfigFlow, + ConfigFlowResult, +) from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.helpers import config_validation as cv @@ -50,6 +54,12 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" else: await self.async_set_unique_id(controller.mac_address) + if self.source == SOURCE_RECONFIGURE: + self._abort_if_unique_id_mismatch(reason="wrong_device") + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + data_updates=user_input, + ) self._abort_if_unique_id_configured() data = {CONF_HOST: host, CONF_PORT: port} return self.async_create_entry( @@ -60,6 +70,17 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, 
errors=errors ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the integration.""" + if not user_input: + return self.async_show_form( + step_id="reconfigure", + data_schema=DATA_SCHEMA, + ) + return await self.async_step_user(user_input) + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Attempt to import the existing configuration.""" self._async_abort_entries_match({CONF_HOST: import_data[CONF_HOST]}) diff --git a/homeassistant/components/russound_rio/quality_scale.yaml b/homeassistant/components/russound_rio/quality_scale.yaml index 3a5e8f9adb7..63693ee6259 100644 --- a/homeassistant/components/russound_rio/quality_scale.yaml +++ b/homeassistant/components/russound_rio/quality_scale.yaml @@ -11,10 +11,7 @@ rules: brands: done common-modules: done config-flow-test-coverage: done - config-flow: - status: todo - comment: | - The data_description fields in translations are missing. + config-flow: done dependency-transparency: done docs-actions: status: exempt @@ -65,7 +62,7 @@ rules: diagnostics: done exception-translations: done icon-translations: todo - reconfiguration-flow: todo + reconfiguration-flow: done dynamic-devices: todo discovery-update-info: todo repair-issues: done diff --git a/homeassistant/components/russound_rio/strings.json b/homeassistant/components/russound_rio/strings.json index b8c29c08301..93544064e20 100644 --- a/homeassistant/components/russound_rio/strings.json +++ b/homeassistant/components/russound_rio/strings.json @@ -9,6 +9,21 @@ "host": "[%key:common::config_flow::data::host%]", "name": "[%key:common::config_flow::data::name%]", "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "The IP address of the Russound controller.", + "port": "The port of the Russound controller." 
+ } + }, + "reconfigure": { + "description": "Reconfigure your Russound controller.", + "data": { + "host": "[%key:common::config_flow::data::host%]", + "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "[%key:component::russound_rio::config::step::user::data_description::host%]", + "port": "[%key:component::russound_rio::config::step::user::data_description::port%]" } } }, @@ -17,7 +32,9 @@ }, "abort": { "cannot_connect": "[%key:component::russound_rio::common::error_cannot_connect%]", - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "wrong_device": "This Russound controller does not match the existing device id. Please make sure you entered the correct IP address." } }, "issues": { diff --git a/tests/components/russound_rio/conftest.py b/tests/components/russound_rio/conftest.py index 5522c1e6ea2..3321d4160b9 100644 --- a/tests/components/russound_rio/conftest.py +++ b/tests/components/russound_rio/conftest.py @@ -9,9 +9,10 @@ from aiorussound.util import controller_device_str, zone_device_str import pytest from homeassistant.components.russound_rio.const import DOMAIN +from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant -from .const import API_VERSION, HARDWARE_MAC, HOST, MOCK_CONFIG, MODEL, PORT +from .const import API_VERSION, HARDWARE_MAC, MOCK_CONFIG, MODEL from tests.common import MockConfigEntry, load_json_object_fixture @@ -68,7 +69,9 @@ def mock_russound_client() -> Generator[AsyncMock]: 1, "MCA-C5", client, controller_device_str(1), HARDWARE_MAC, None, zones ) } - client.connection_handler = RussoundTcpConnectionHandler(HOST, PORT) + client.connection_handler = RussoundTcpConnectionHandler( + MOCK_CONFIG[CONF_HOST], MOCK_CONFIG[CONF_PORT] + ) 
client.is_connected = Mock(return_value=True) client.unregister_state_update_callbacks.return_value = True client.rio_version = API_VERSION diff --git a/tests/components/russound_rio/const.py b/tests/components/russound_rio/const.py index 8f8ae7b59ea..18f75838525 100644 --- a/tests/components/russound_rio/const.py +++ b/tests/components/russound_rio/const.py @@ -3,16 +3,20 @@ from collections import namedtuple from homeassistant.components.media_player import DOMAIN as MP_DOMAIN +from homeassistant.const import CONF_HOST, CONF_PORT -HOST = "127.0.0.1" -PORT = 9621 MODEL = "MCA-C5" HARDWARE_MAC = "00:11:22:33:44:55" API_VERSION = "1.08.00" MOCK_CONFIG = { - "host": HOST, - "port": PORT, + CONF_HOST: "192.168.20.75", + CONF_PORT: 9621, +} + +MOCK_RECONFIGURATION_CONFIG = { + CONF_HOST: "192.168.20.70", + CONF_PORT: 9622, } _CONTROLLER = namedtuple("Controller", ["mac_address", "controller_type"]) # noqa: PYI024 diff --git a/tests/components/russound_rio/snapshots/test_init.ambr b/tests/components/russound_rio/snapshots/test_init.ambr index fcd59dd06f7..c92f06c4bc0 100644 --- a/tests/components/russound_rio/snapshots/test_init.ambr +++ b/tests/components/russound_rio/snapshots/test_init.ambr @@ -3,7 +3,7 @@ DeviceRegistryEntrySnapshot({ 'area_id': None, 'config_entries': , - 'configuration_url': 'http://127.0.0.1', + 'configuration_url': 'http://192.168.20.75', 'connections': set({ tuple( 'mac', diff --git a/tests/components/russound_rio/test_config_flow.py b/tests/components/russound_rio/test_config_flow.py index 28cbf7eda5e..7a3b7fac7da 100644 --- a/tests/components/russound_rio/test_config_flow.py +++ b/tests/components/russound_rio/test_config_flow.py @@ -3,11 +3,12 @@ from unittest.mock import AsyncMock from homeassistant.components.russound_rio.const import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER, ConfigFlowResult +from homeassistant.const import CONF_HOST, 
CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .const import MOCK_CONFIG, MODEL +from .const import MOCK_CONFIG, MOCK_RECONFIGURATION_CONFIG, MODEL from tests.common import MockConfigEntry @@ -117,3 +118,63 @@ async def test_import_cannot_connect( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "cannot_connect" + + +async def _start_reconfigure_flow( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> ConfigFlowResult: + """Initialize a reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + + reconfigure_result = await mock_config_entry.start_reconfigure_flow(hass) + + assert reconfigure_result["type"] is FlowResultType.FORM + assert reconfigure_result["step_id"] == "reconfigure" + + return reconfigure_result + + +async def test_reconfigure_flow( + hass: HomeAssistant, + mock_russound_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfigure flow.""" + + reconfigure_result = await _start_reconfigure_flow(hass, mock_config_entry) + + result = await hass.config_entries.flow.async_configure( + reconfigure_result["flow_id"], + MOCK_RECONFIGURATION_CONFIG, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + entry = hass.config_entries.async_get_entry(mock_config_entry.entry_id) + assert entry + assert entry.data == { + CONF_HOST: "192.168.20.70", + CONF_PORT: 9622, + } + + +async def test_reconfigure_unique_id_mismatch( + hass: HomeAssistant, + mock_russound_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Ensure reconfigure flow aborts when the bride changes.""" + mock_russound_client.controllers[1].mac_address = "different_mac" + + reconfigure_result = await _start_reconfigure_flow(hass, mock_config_entry) + + result = await hass.config_entries.flow.async_configure( + 
reconfigure_result["flow_id"], + MOCK_RECONFIGURATION_CONFIG, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "wrong_device" diff --git a/tests/components/russound_rio/test_init.py b/tests/components/russound_rio/test_init.py index e7022fa6ac1..d654eea32bd 100644 --- a/tests/components/russound_rio/test_init.py +++ b/tests/components/russound_rio/test_init.py @@ -59,8 +59,8 @@ async def test_disconnect_reconnect_log( mock_russound_client.is_connected = Mock(return_value=False) await mock_state_update(mock_russound_client, CallbackType.CONNECTION) - assert "Disconnected from device at 127.0.0.1" in caplog.text + assert "Disconnected from device at 192.168.20.75" in caplog.text mock_russound_client.is_connected = Mock(return_value=True) await mock_state_update(mock_russound_client, CallbackType.CONNECTION) - assert "Reconnected to device at 127.0.0.1" in caplog.text + assert "Reconnected to device at 192.168.20.75" in caplog.text From 4c91d1b402a5cdd8c9251c0ee49ac4aa983e2bbd Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Wed, 18 Dec 2024 08:48:37 +0100 Subject: [PATCH 394/677] Add support for ACB batteries to Enphase Envoy (#131298) * Add support for ACB batteries to Enphase Envoy * Add tests for ACB battery support in ENphase Envoy * make acb state sensordeviceclass ENUM * Capitalize strings and use common idle --- .../components/enphase_envoy/sensor.py | 145 + .../components/enphase_envoy/strings.json | 24 + tests/components/enphase_envoy/conftest.py | 6 + .../fixtures/envoy_acb_batt.json | 274 + .../enphase_envoy/snapshots/test_sensor.ambr | 4854 +++++++++++++++++ tests/components/enphase_envoy/test_sensor.py | 101 + 6 files changed, 5404 insertions(+) create mode 100644 tests/components/enphase_envoy/fixtures/envoy_acb_batt.json diff --git a/homeassistant/components/enphase_envoy/sensor.py b/homeassistant/components/enphase_envoy/sensor.py index fadbf191840..62ae5b621ac 100644 
--- a/homeassistant/components/enphase_envoy/sensor.py +++ b/homeassistant/components/enphase_envoy/sensor.py @@ -10,6 +10,8 @@ from operator import attrgetter from typing import TYPE_CHECKING from pyenphase import ( + EnvoyACBPower, + EnvoyBatteryAggregate, EnvoyEncharge, EnvoyEnchargeAggregate, EnvoyEnchargePower, @@ -723,6 +725,78 @@ ENCHARGE_AGGREGATE_SENSORS = ( ) +@dataclass(frozen=True, kw_only=True) +class EnvoyAcbBatterySensorEntityDescription(SensorEntityDescription): + """Describes an Envoy ACB Battery sensor entity.""" + + value_fn: Callable[[EnvoyACBPower], int | str] + + +ACB_BATTERY_POWER_SENSORS = ( + EnvoyAcbBatterySensorEntityDescription( + key="acb_power", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + value_fn=attrgetter("power"), + ), + EnvoyAcbBatterySensorEntityDescription( + key="acb_soc", + native_unit_of_measurement=PERCENTAGE, + device_class=SensorDeviceClass.BATTERY, + value_fn=attrgetter("state_of_charge"), + ), + EnvoyAcbBatterySensorEntityDescription( + key="acb_battery_state", + translation_key="acb_battery_state", + device_class=SensorDeviceClass.ENUM, + options=["discharging", "idle", "charging", "full"], + value_fn=attrgetter("state"), + ), +) + +ACB_BATTERY_ENERGY_SENSORS = ( + EnvoyAcbBatterySensorEntityDescription( + key="acb_available_energy", + translation_key="acb_available_energy", + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + device_class=SensorDeviceClass.ENERGY_STORAGE, + value_fn=attrgetter("charge_wh"), + ), +) + + +@dataclass(frozen=True, kw_only=True) +class EnvoyAggregateBatterySensorEntityDescription(SensorEntityDescription): + """Describes an Envoy aggregate Ensemble and ACB Battery sensor entity.""" + + value_fn: Callable[[EnvoyBatteryAggregate], int] + + +AGGREGATE_BATTERY_SENSORS = ( + EnvoyAggregateBatterySensorEntityDescription( + key="aggregated_soc", + translation_key="aggregated_soc", + native_unit_of_measurement=PERCENTAGE, + 
device_class=SensorDeviceClass.BATTERY, + value_fn=attrgetter("state_of_charge"), + ), + EnvoyAggregateBatterySensorEntityDescription( + key="aggregated_available_energy", + translation_key="aggregated_available_energy", + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + device_class=SensorDeviceClass.ENERGY_STORAGE, + value_fn=attrgetter("available_energy"), + ), + EnvoyAggregateBatterySensorEntityDescription( + key="aggregated_max_battery_capacity", + translation_key="aggregated_max_capacity", + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + device_class=SensorDeviceClass.ENERGY_STORAGE, + value_fn=attrgetter("max_available_capacity"), + ), +) + + async def async_setup_entry( hass: HomeAssistant, config_entry: EnphaseConfigEntry, @@ -847,6 +921,20 @@ async def async_setup_entry( EnvoyEnpowerEntity(coordinator, description) for description in ENPOWER_SENSORS ) + if envoy_data.acb_power: + entities.extend( + EnvoyAcbBatteryPowerEntity(coordinator, description) + for description in ACB_BATTERY_POWER_SENSORS + ) + entities.extend( + EnvoyAcbBatteryEnergyEntity(coordinator, description) + for description in ACB_BATTERY_ENERGY_SENSORS + ) + if envoy_data.battery_aggregate: + entities.extend( + AggregateBatteryEntity(coordinator, description) + for description in AGGREGATE_BATTERY_SENSORS + ) async_add_entities(entities) @@ -1228,3 +1316,60 @@ class EnvoyEnpowerEntity(EnvoySensorBaseEntity): enpower = self.data.enpower assert enpower is not None return self.entity_description.value_fn(enpower) + + +class EnvoyAcbBatteryPowerEntity(EnvoySensorBaseEntity): + """Envoy ACB Battery power sensor entity.""" + + entity_description: EnvoyAcbBatterySensorEntityDescription + + def __init__( + self, + coordinator: EnphaseUpdateCoordinator, + description: EnvoyAcbBatterySensorEntityDescription, + ) -> None: + """Initialize ACB Battery entity.""" + super().__init__(coordinator, description) + acb_data = self.data.acb_power + assert acb_data is not None + 
self._attr_unique_id = f"{self.envoy_serial_num}_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, f"{self.envoy_serial_num}_acb")}, + manufacturer="Enphase", + model="ACB", + name=f"ACB {self.envoy_serial_num}", + via_device=(DOMAIN, self.envoy_serial_num), + ) + + @property + def native_value(self) -> int | str | None: + """Return the state of the ACB Battery power sensors.""" + acb = self.data.acb_power + assert acb is not None + return self.entity_description.value_fn(acb) + + +class EnvoyAcbBatteryEnergyEntity(EnvoySystemSensorEntity): + """Envoy combined ACB and Ensemble Battery Aggregate energy sensor entity.""" + + entity_description: EnvoyAcbBatterySensorEntityDescription + + @property + def native_value(self) -> int | str: + """Return the state of the aggregate energy sensors.""" + acb = self.data.acb_power + assert acb is not None + return self.entity_description.value_fn(acb) + + +class AggregateBatteryEntity(EnvoySystemSensorEntity): + """Envoy combined ACB and Ensemble Battery Aggregate sensor entity.""" + + entity_description: EnvoyAggregateBatterySensorEntityDescription + + @property + def native_value(self) -> int: + """Return the state of the aggregate sensors.""" + battery_aggregate = self.data.battery_aggregate + assert battery_aggregate is not None + return self.entity_description.value_fn(battery_aggregate) diff --git a/homeassistant/components/enphase_envoy/strings.json b/homeassistant/components/enphase_envoy/strings.json index 2d91b3b0960..a338deb9638 100644 --- a/homeassistant/components/enphase_envoy/strings.json +++ b/homeassistant/components/enphase_envoy/strings.json @@ -337,6 +337,30 @@ }, "configured_reserve_soc": { "name": "Configured reserve battery level" + }, + "acb_battery_state": { + "name": "Battery state", + "state": { + "discharging": "Discharging", + "idle": "[%key:common::state::idle%]", + "charging": "Charging", + "full": "Full" + } + }, + "acb_available_energy": { + "name": "Available ACB 
battery energy" + }, + "acb_max_capacity": { + "name": "ACB Battery capacity" + }, + "aggregated_available_energy": { + "name": "Aggregated available battery energy" + }, + "aggregated_max_capacity": { + "name": "Aggregated Battery capacity" + }, + "aggregated_soc": { + "name": "Aggregated battery soc" } }, "switch": { diff --git a/tests/components/enphase_envoy/conftest.py b/tests/components/enphase_envoy/conftest.py index 541b6f96e19..b860d49aa6b 100644 --- a/tests/components/enphase_envoy/conftest.py +++ b/tests/components/enphase_envoy/conftest.py @@ -6,6 +6,8 @@ from unittest.mock import AsyncMock, Mock, patch import jwt from pyenphase import ( + EnvoyACBPower, + EnvoyBatteryAggregate, EnvoyData, EnvoyEncharge, EnvoyEnchargeAggregate, @@ -172,6 +174,8 @@ def _load_json_2_production_data( mocked_data.system_production_phases[sub_item] = EnvoySystemProduction( **item_data ) + if item := json_fixture["data"].get("acb_power"): + mocked_data.acb_power = EnvoyACBPower(**item) def _load_json_2_meter_data( @@ -245,6 +249,8 @@ def _load_json_2_encharge_enpower_data( mocked_data.dry_contact_settings[sub_item] = EnvoyDryContactSettings( **item_data ) + if item := json_fixture["data"].get("battery_aggregate"): + mocked_data.battery_aggregate = EnvoyBatteryAggregate(**item) def _load_json_2_raw_data(mocked_data: EnvoyData, json_fixture: dict[str, Any]) -> None: diff --git a/tests/components/enphase_envoy/fixtures/envoy_acb_batt.json b/tests/components/enphase_envoy/fixtures/envoy_acb_batt.json new file mode 100644 index 00000000000..618b40027b8 --- /dev/null +++ b/tests/components/enphase_envoy/fixtures/envoy_acb_batt.json @@ -0,0 +1,274 @@ +{ + "serial_number": "1234", + "firmware": "7.6.358", + "part_number": "800-00654-r08", + "envoy_model": "Envoy, phases: 3, phase mode: three, net-consumption CT, production CT", + "supported_features": 1759, + "phase_mode": "three", + "phase_count": 3, + "active_phase_count": 0, + "ct_meter_count": 2, + "consumption_meter_type": 
"net-consumption", + "production_meter_type": "production", + "storage_meter_type": null, + "data": { + "encharge_inventory": { + "123456": { + "admin_state": 6, + "admin_state_str": "ENCHG_STATE_READY", + "bmu_firmware_version": "2.1.16", + "comm_level_2_4_ghz": 4, + "comm_level_sub_ghz": 4, + "communicating": true, + "dc_switch_off": false, + "encharge_capacity": 3500, + "encharge_revision": 2, + "firmware_loaded_date": 1714736645, + "firmware_version": "2.6.6618_rel/22.11", + "installed_date": 1714736645, + "last_report_date": 1714804173, + "led_status": 17, + "max_cell_temp": 16, + "operating": true, + "part_number": "830-01760-r46", + "percent_full": 54, + "serial_number": "122327081322", + "temperature": 16, + "temperature_unit": "C", + "zigbee_dongle_fw_version": "100F" + } + }, + "encharge_power": { + "123456": { + "apparent_power_mva": 105, + "real_power_mw": 105, + "soc": 54 + } + }, + "encharge_aggregate": { + "available_energy": 1890, + "backup_reserve": 0, + "state_of_charge": 54, + "reserve_state_of_charge": 0, + "configured_reserve_state_of_charge": 0, + "max_available_capacity": 3500 + }, + "enpower": null, + "acb_power": { + "power": 260, + "charge_wh": 930, + "state_of_charge": 25, + "state": "discharging", + "batteries": 3 + }, + "battery_aggregate": { + "available_energy": 2820, + "state_of_charge": 39, + "max_available_capacity": 7220 + }, + "system_consumption": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_production": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_consumption_phases": null, + "system_production_phases": null, + "system_net_consumption": { + "watt_hours_lifetime": 4321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 2341 + }, + "system_net_consumption_phases": null, + "ctmeter_production": { + "eid": "100000010", + "timestamp": 
1708006110, + "energy_delivered": 11234, + "energy_received": 12345, + "active_power": 100, + "power_factor": 0.11, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["production-imbalance", "power-on-unused-phase"] + }, + "ctmeter_consumption": { + "eid": "100000020", + "timestamp": 1708006120, + "energy_delivered": 21234, + "energy_received": 22345, + "active_power": 101, + "power_factor": 0.21, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "ctmeter_storage": null, + "ctmeter_production_phases": { + "L1": { + "eid": "100000011", + "timestamp": 1708006111, + "energy_delivered": 112341, + "energy_received": 123451, + "active_power": 20, + "power_factor": 0.12, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["production-imbalance"] + }, + "L2": { + "eid": "100000012", + "timestamp": 1708006112, + "energy_delivered": 112342, + "energy_received": 123452, + "active_power": 30, + "power_factor": 0.13, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["power-on-unused-phase"] + }, + "L3": { + "eid": "100000013", + "timestamp": 1708006113, + "energy_delivered": 112343, + "energy_received": 123453, + "active_power": 50, + "power_factor": 0.14, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": [] + } + }, + "ctmeter_consumption_phases": { + "L1": { + "eid": "100000021", + "timestamp": 1708006121, + "energy_delivered": 212341, + "energy_received": 223451, + "active_power": 21, + 
"power_factor": 0.22, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "L2": { + "eid": "100000022", + "timestamp": 1708006122, + "energy_delivered": 212342, + "energy_received": 223452, + "active_power": 31, + "power_factor": 0.23, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "L3": { + "eid": "100000023", + "timestamp": 1708006123, + "energy_delivered": 212343, + "energy_received": 223453, + "active_power": 51, + "power_factor": 0.24, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + } + }, + "ctmeter_storage_phases": null, + "dry_contact_status": {}, + "dry_contact_settings": {}, + "inverters": { + "1": { + "serial_number": "1", + "last_report_date": 1, + "last_report_watts": 1, + "max_report_watts": 1 + } + }, + "tariff": { + "currency": { + "code": "EUR" + }, + "logger": "mylogger", + "date": "1714749724", + "storage_settings": { + "mode": "self-consumption", + "operation_mode_sub_type": "", + "reserved_soc": 0.0, + "very_low_soc": 5, + "charge_from_grid": true, + "date": "1714749724" + }, + "single_rate": { + "rate": 0.0, + "sell": 0.0 + }, + "seasons": [ + { + "id": "all_year_long", + "start": "1/1", + "days": [ + { + "id": "all_days", + "days": "Mon,Tue,Wed,Thu,Fri,Sat,Sun", + "must_charge_start": 0, + "must_charge_duration": 0, + "must_charge_mode": "CP", + "enable_discharge_to_grid": false, + "periods": [ + { + "id": "period_1", + "start": 0, + "rate": 0.0 + } + ] + } + ], + "tiers": [] + } + ], + "seasons_sell": [] + }, + "raw": { + "varies_by": "firmware_version" + } + } +} diff --git a/tests/components/enphase_envoy/snapshots/test_sensor.ambr 
b/tests/components/enphase_envoy/snapshots/test_sensor.ambr index c43325a639d..d6a523a3e15 100644 --- a/tests/components/enphase_envoy/snapshots/test_sensor.ambr +++ b/tests/components/enphase_envoy/snapshots/test_sensor.ambr @@ -1838,6 +1838,4860 @@ 'state': '1970-01-01T00:00:01+00:00', }) # --- +# name: test_sensor[envoy_acb_batt][sensor.acb_1234_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.acb_1234_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234_acb_soc', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.acb_1234_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'ACB 1234 Battery', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.acb_1234_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.acb_1234_battery_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'discharging', + 'idle', + 'charging', + 'full', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.acb_1234_battery_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ 
+ }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery state', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'acb_battery_state', + 'unique_id': '1234_acb_battery_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.acb_1234_battery_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'ACB 1234 Battery state', + 'options': list([ + 'discharging', + 'idle', + 'charging', + 'full', + ]), + }), + 'context': , + 'entity_id': 'sensor.acb_1234_battery_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'discharging', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.acb_1234_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.acb_1234_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234_acb_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.acb_1234_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'ACB 1234 Power', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.acb_1234_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '260', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_apparent_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 
'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_apparent_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_apparent_power_mva', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_apparent_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Encharge 123456 Apparent power', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_apparent_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.105', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_soc', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 
'battery', + 'friendly_name': 'Encharge 123456 Battery', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '54', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_last_reported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '123456_last_reported', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_last_reported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Encharge 123456 Last reported', + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_last_reported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-05-04T06:29:33+00:00', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 
'original_icon': None, + 'original_name': 'Power', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_real_power_mw', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Encharge 123456 Power', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.105', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.encharge_123456_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Encharge 123456 Temperature', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_aggregated_available_battery_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_aggregated_available_battery_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Aggregated available battery energy', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'aggregated_available_energy', + 'unique_id': '1234_aggregated_available_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_aggregated_available_battery_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy_storage', + 'friendly_name': 'Envoy 1234 Aggregated available battery energy', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_aggregated_available_battery_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2820', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_aggregated_battery_capacity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_aggregated_battery_capacity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Aggregated Battery capacity', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'aggregated_max_capacity', + 'unique_id': 
'1234_aggregated_max_battery_capacity', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_aggregated_battery_capacity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy_storage', + 'friendly_name': 'Envoy 1234 Aggregated Battery capacity', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_aggregated_battery_capacity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7220', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_aggregated_battery_soc-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_aggregated_battery_soc', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Aggregated battery soc', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'aggregated_soc', + 'unique_id': '1234_aggregated_soc', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_aggregated_battery_soc-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Envoy 1234 Aggregated battery soc', + 'icon': 'mdi:flash', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_aggregated_battery_soc', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '39', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_available_acb_battery_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_available_acb_battery_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Available ACB battery energy', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'acb_available_energy', + 'unique_id': '1234_acb_available_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_available_acb_battery_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy_storage', + 'friendly_name': 'Envoy 1234 Available ACB battery energy', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_available_acb_battery_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '930', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_available_battery_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_available_battery_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Available battery energy', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'available_energy', + 'unique_id': '1234_available_energy', + 'unit_of_measurement': , + }) +# --- +# 
name: test_sensor[envoy_acb_batt][sensor.envoy_1234_available_battery_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Available battery energy', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_available_battery_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1890', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_balanced_net_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption', + 'unique_id': '1234_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_balanced_net_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.341', + }) +# --- +# name: 
test_sensor[envoy_acb_batt][sensor.envoy_1234_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Battery', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234_battery_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Envoy 1234 Battery', + 'icon': 'mdi:flash', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '54', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_battery_capacity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_battery_capacity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Battery capacity', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'max_capacity', + 'unique_id': '1234_max_capacity', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[envoy_acb_batt][sensor.envoy_1234_battery_capacity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Battery capacity', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_battery_capacity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3500', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_net_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption', + 'unique_id': '1234_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_net_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.101', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_net_power_consumption_l1-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_net_power_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.021', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_net_power_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ 
+ 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_net_power_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.031', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_net_power_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[envoy_acb_batt][sensor.envoy_1234_current_net_power_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.051', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption', + 'unique_id': '1234_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: 
test_sensor[envoy_acb_batt][sensor.envoy_1234_current_power_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production', + 'unique_id': '1234_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_current_power_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_energy_consumption_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 
'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption', + 'unique_id': '1234_seven_days_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_energy_consumption_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_energy_consumption_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption', + 'unique_id': '1234_daily_consumption', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[envoy_acb_batt][sensor.envoy_1234_energy_consumption_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption today', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_energy_production_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production', + 'unique_id': '1234_seven_days_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_energy_production_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: 
test_sensor[envoy_acb_batt][sensor.envoy_1234_energy_production_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production', + 'unique_id': '1234_daily_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_energy_production_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production today', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ 
+ 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency', + 'unique_id': '1234_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 
'friendly_name': 'Envoy 1234 Frequency net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'production_ct_frequency', + 'unique_id': '1234_production_ct_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': '50.1', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_frequency_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption', + 'unique_id': '1234_lifetime_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.321', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption', + 'unique_id': '1234_lifetime_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001234', + }) +# --- +# 
name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_energy_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production', + 'unique_id': '1234_lifetime_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_energy_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001234', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + 
}), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption', + 'unique_id': '1234_lifetime_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.021234', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': 
'1234_lifetime_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.212341', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.envoy_1234_lifetime_net_energy_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.212342', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.212343', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production', + 'unique_id': '1234_lifetime_net_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.022345', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_production_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 
'original_name': 'Lifetime net energy production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_production_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223451', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_production_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_production_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 
'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223452', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_production_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_lifetime_net_energy_production_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223453', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags', + 'unique_id': '1234_net_consumption_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l1', + 'platform': 
'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l1', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l2', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l3', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), 
+ 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags', + 'unique_id': '1234_production_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l1', + 
'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l2', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_meter_status_flags_active_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l3', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status', + 'unique_id': '1234_net_consumption_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l1', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: 
test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l2', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + 
}), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l3', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status', + 'unique_id': '1234_production_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 
'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l1', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l2', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_metering_status_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l3', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_net_consumption_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current', + 'unique_id': '1234_net_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_net_consumption_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_net_consumption_ct_current_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_net_consumption_ct_current_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_net_consumption_ct_current_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_1234_net_consumption_ct_current_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_net_consumption_ct_current_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_net_consumption_ct_current_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_net_consumption_ct_current_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor', + 'unique_id': '1234_net_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '0.21', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.22', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.23', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l3', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.24', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'powerfactor production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor', + 'unique_id': '1234_production_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.11', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': 
set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.12', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 
'Powerfactor production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.13', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 
'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.14', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_production_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current', + 'unique_id': '1234_production_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_production_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_production_ct_current_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_1234_production_ct_current_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_production_ct_current_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_production_ct_current_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_production_ct_current_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_production_ct_current_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_production_ct_current_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l3', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_reserve_battery_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_reserve_battery_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Reserve battery energy', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reserve_energy', + 'unique_id': '1234_reserve_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_reserve_battery_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Reserve battery energy', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_reserve_battery_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_reserve_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_reserve_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Reserve battery level', + 'platform': 
'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reserve_soc', + 'unique_id': '1234_reserve_soc', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_reserve_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Envoy 1234 Reserve battery level', + 'icon': 'mdi:flash', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_reserve_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage', + 'unique_id': '1234_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', 
+ 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage', + 'unique_id': '1234_production_ct_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': 
, + 'state': '111', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l3', + 'unit_of_measurement': , 
+ }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_voltage_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.inverter_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.inverter_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter 1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.inverter_1_last_reported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_last_reported', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '1_last_reported', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_acb_batt][sensor.inverter_1_last_reported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Inverter 1 Last reported', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.inverter_1_last_reported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1970-01-01T00:00:01+00:00', + }) +# --- # name: test_sensor[envoy_eu_batt][sensor.encharge_123456_apparent_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/enphase_envoy/test_sensor.py b/tests/components/enphase_envoy/test_sensor.py index 784dfe54073..89f28c74514 100644 --- a/tests/components/enphase_envoy/test_sensor.py +++ b/tests/components/enphase_envoy/test_sensor.py @@ -31,6 +31,7 @@ from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_plat "envoy_metered_batt_relay", "envoy_nobatt_metered_3p", "envoy_tot_cons_metered", + "envoy_acb_batt", ], indirect=["mock_envoy"], ) @@ -65,6 +66,7 @@ PRODUCTION_NAMES: tuple[str, ...] = ( "envoy_metered_batt_relay", "envoy_nobatt_metered_3p", "envoy_tot_cons_metered", + "envoy_acb_batt", ], indirect=["mock_envoy"], ) @@ -154,6 +156,7 @@ CONSUMPTION_NAMES: tuple[str, ...] = ( "envoy_eu_batt", "envoy_metered_batt_relay", "envoy_nobatt_metered_3p", + "envoy_acb_batt", ], indirect=["mock_envoy"], ) @@ -197,6 +200,7 @@ NET_CONSUMPTION_NAMES: tuple[str, ...] 
= ( "envoy_metered_batt_relay", "envoy_nobatt_metered_3p", "envoy_tot_cons_metered", + "envoy_acb_batt", ], indirect=["mock_envoy"], ) @@ -803,6 +807,7 @@ async def test_sensor_inverter_disabled_by_integration( ("mock_envoy"), [ "envoy_metered_batt_relay", + "envoy_acb_batt", ], indirect=["mock_envoy"], ) @@ -873,6 +878,7 @@ async def test_sensor_encharge_enpower_data( ("mock_envoy"), [ "envoy_metered_batt_relay", + "envoy_acb_batt", ], indirect=["mock_envoy"], ) @@ -930,6 +936,101 @@ async def test_sensor_encharge_power_data( ) +ACB_POWER_INT_NAMES: tuple[str, ...] = ( + "power", + "battery", +) +ACB_POWER_STR_NAMES: tuple[str, ...] = ("battery_state",) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_acb_batt", + ], + indirect=["mock_envoy"], +) +async def test_sensor_acb_power_data( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_envoy: AsyncMock, +) -> None: + """Test enphase_envoy acb battery power entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.acb_{sn}" + + data = mock_envoy.data.acb_power + ACB_POWER_INT_TARGETS: tuple[int, ...] = ( + data.power, + data.state_of_charge, + ) + ACB_POWER_STR_TARGETS: tuple[int, ...] = (data.state,) + + for name, target in list( + zip(ACB_POWER_INT_NAMES, ACB_POWER_INT_TARGETS, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert int(entity_state.state) == target + + for name, target in list( + zip(ACB_POWER_STR_NAMES, ACB_POWER_STR_TARGETS, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert entity_state.state == target + + +AGGREGATED_BATTERY_NAMES: tuple[str, ...] = ( + "aggregated_battery_soc", + "aggregated_available_battery_energy", + "aggregated_battery_capacity", +) +AGGREGATED_ACB_BATTERY_NAMES: tuple[str, ...] 
= ("available_acb_battery_energy",) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_acb_batt", + ], + indirect=["mock_envoy"], +) +async def test_sensor_aggegated_battery_data( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_envoy: AsyncMock, +) -> None: + """Test enphase_envoy aggregated batteries entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + data = mock_envoy.data.battery_aggregate + AGGREGATED_TARGETS: tuple[int, ...] = ( + data.state_of_charge, + data.available_energy, + data.max_available_capacity, + ) + + for name, target in list( + zip(AGGREGATED_BATTERY_NAMES, AGGREGATED_TARGETS, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert int(entity_state.state) == target + + data = mock_envoy.data.acb_power + AGGREGATED_ACB_TARGETS: tuple[int, ...] 
= (data.charge_wh,) + for name, target in list( + zip(AGGREGATED_ACB_BATTERY_NAMES, AGGREGATED_ACB_TARGETS, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert int(entity_state.state) == target + + def integration_disabled_entities( entity_registry: er.EntityRegistry, config_entry: MockConfigEntry ) -> list[str]: From a449ca65be6013a2ec70dc7a9560b339e10b0404 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Wed, 18 Dec 2024 03:33:17 -0500 Subject: [PATCH 395/677] Improve test coverage for Russound RIO (#133096) * Improve test coverage for Russound RIO * Update * Update --- .../russound_rio/quality_scale.yaml | 2 +- tests/components/russound_rio/conftest.py | 49 +++++-- tests/components/russound_rio/const.py | 5 - .../russound_rio/fixtures/get_zones.json | 54 +++++--- .../russound_rio/test_media_player.py | 126 ++++++++++++++++++ 5 files changed, 200 insertions(+), 36 deletions(-) diff --git a/homeassistant/components/russound_rio/quality_scale.yaml b/homeassistant/components/russound_rio/quality_scale.yaml index 63693ee6259..6edf439cae6 100644 --- a/homeassistant/components/russound_rio/quality_scale.yaml +++ b/homeassistant/components/russound_rio/quality_scale.yaml @@ -38,7 +38,7 @@ rules: comment: | This integration does not require authentication. 
parallel-updates: done - test-coverage: todo + test-coverage: done integration-owner: done docs-installation-parameters: todo docs-configuration-parameters: diff --git a/tests/components/russound_rio/conftest.py b/tests/components/russound_rio/conftest.py index 3321d4160b9..b9e6e89812a 100644 --- a/tests/components/russound_rio/conftest.py +++ b/tests/components/russound_rio/conftest.py @@ -47,27 +47,54 @@ def mock_russound_client() -> Generator[AsyncMock]: ), ): client = mock_client.return_value - zones = { - int(k): ZoneControlSurface.from_dict(v) - for k, v in load_json_object_fixture("get_zones.json", DOMAIN).items() + controller_zones = { + int(controller_id): { + int(zone_id): ZoneControlSurface.from_dict(zone) + for zone_id, zone in v["zones"].items() + } + for controller_id, v in load_json_object_fixture("get_zones.json", DOMAIN)[ + "controllers" + ].items() } client.sources = { int(k): Source.from_dict(v) for k, v in load_json_object_fixture("get_sources.json", DOMAIN).items() } client.state = load_json_object_fixture("get_state.json", DOMAIN) - for k, v in zones.items(): - v.device_str = zone_device_str(1, k) - v.fetch_current_source = Mock( - side_effect=lambda current_source=v.current_source: client.sources.get( - int(current_source) + for controller_id, zones in controller_zones.items(): + for zone_id, zone in zones.items(): + zone.device_str = zone_device_str(controller_id, zone_id) + zone.fetch_current_source = Mock( + side_effect=lambda current_source=zone.current_source: client.sources.get( + int(current_source) + ) ) - ) + zone.volume_up = AsyncMock() + zone.volume_down = AsyncMock() + zone.set_volume = AsyncMock() + zone.zone_on = AsyncMock() + zone.zone_off = AsyncMock() + zone.select_source = AsyncMock() client.controllers = { 1: Controller( - 1, "MCA-C5", client, controller_device_str(1), HARDWARE_MAC, None, zones - ) + 1, + MODEL, + client, + controller_device_str(1), + HARDWARE_MAC, + None, + controller_zones[1], + ), + 2: Controller( + 2, + 
MODEL, + client, + controller_device_str(2), + None, + None, + controller_zones[2], + ), } client.connection_handler = RussoundTcpConnectionHandler( MOCK_CONFIG[CONF_HOST], MOCK_CONFIG[CONF_PORT] diff --git a/tests/components/russound_rio/const.py b/tests/components/russound_rio/const.py index 18f75838525..8269e825e33 100644 --- a/tests/components/russound_rio/const.py +++ b/tests/components/russound_rio/const.py @@ -1,7 +1,5 @@ """Constants for russound_rio tests.""" -from collections import namedtuple - from homeassistant.components.media_player import DOMAIN as MP_DOMAIN from homeassistant.const import CONF_HOST, CONF_PORT @@ -19,9 +17,6 @@ MOCK_RECONFIGURATION_CONFIG = { CONF_PORT: 9622, } -_CONTROLLER = namedtuple("Controller", ["mac_address", "controller_type"]) # noqa: PYI024 -MOCK_CONTROLLERS = {1: _CONTROLLER(mac_address=HARDWARE_MAC, controller_type=MODEL)} - DEVICE_NAME = "mca_c5" NAME_ZONE_1 = "backyard" ENTITY_ID_ZONE_1 = f"{MP_DOMAIN}.{DEVICE_NAME}_{NAME_ZONE_1}" diff --git a/tests/components/russound_rio/fixtures/get_zones.json b/tests/components/russound_rio/fixtures/get_zones.json index 396310339b3..e1077944593 100644 --- a/tests/components/russound_rio/fixtures/get_zones.json +++ b/tests/components/russound_rio/fixtures/get_zones.json @@ -1,22 +1,38 @@ { - "1": { - "name": "Backyard", - "volume": "10", - "status": "ON", - "enabled": "True", - "current_source": "1" - }, - "2": { - "name": "Kitchen", - "volume": "50", - "status": "OFF", - "enabled": "True", - "current_source": "2" - }, - "3": { - "name": "Bedroom", - "volume": "10", - "status": "OFF", - "enabled": "False" + "controllers": { + "1": { + "zones": { + "1": { + "name": "Backyard", + "volume": "10", + "status": "ON", + "enabled": "True", + "current_source": "1" + }, + "2": { + "name": "Kitchen", + "volume": "50", + "status": "OFF", + "enabled": "True", + "current_source": "2" + }, + "3": { + "name": "Bedroom", + "volume": "10", + "status": "OFF", + "enabled": "False" + } + } + }, + "2": { 
+ "zones": { + "9": { + "name": "Living Room", + "volume": "10", + "status": "OFF", + "enabled": "True" + } + } + } } } diff --git a/tests/components/russound_rio/test_media_player.py b/tests/components/russound_rio/test_media_player.py index c740ec4f39e..1ff87ee8b0e 100644 --- a/tests/components/russound_rio/test_media_player.py +++ b/tests/components/russound_rio/test_media_player.py @@ -2,10 +2,23 @@ from unittest.mock import AsyncMock +from aiorussound.exceptions import CommandError from aiorussound.models import PlayStatus import pytest +from homeassistant.components.media_player import ( + ATTR_INPUT_SOURCE, + ATTR_MEDIA_VOLUME_LEVEL, + DOMAIN as MP_DOMAIN, + SERVICE_SELECT_SOURCE, +) from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + SERVICE_VOLUME_DOWN, + SERVICE_VOLUME_SET, + SERVICE_VOLUME_UP, STATE_BUFFERING, STATE_IDLE, STATE_OFF, @@ -14,6 +27,7 @@ from homeassistant.const import ( STATE_PLAYING, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from . 
import mock_state_update, setup_integration from .const import ENTITY_ID_ZONE_1 @@ -50,3 +64,115 @@ async def test_entity_state( state = hass.states.get(ENTITY_ID_ZONE_1) assert state.state == media_player_state + + +async def test_media_volume( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_russound_client: AsyncMock, +) -> None: + """Test volume service.""" + await setup_integration(hass, mock_config_entry) + + # Test volume up + await hass.services.async_call( + MP_DOMAIN, + SERVICE_VOLUME_UP, + {ATTR_ENTITY_ID: ENTITY_ID_ZONE_1}, + blocking=True, + ) + + mock_russound_client.controllers[1].zones[1].volume_up.assert_called_once() + + # Test volume down + await hass.services.async_call( + MP_DOMAIN, + SERVICE_VOLUME_DOWN, + {ATTR_ENTITY_ID: ENTITY_ID_ZONE_1}, + blocking=True, + ) + + mock_russound_client.controllers[1].zones[1].volume_down.assert_called_once() + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_VOLUME_SET, + {ATTR_ENTITY_ID: ENTITY_ID_ZONE_1, ATTR_MEDIA_VOLUME_LEVEL: 0.30}, + blocking=True, + ) + + mock_russound_client.controllers[1].zones[1].set_volume.assert_called_once_with( + "15" + ) + + +@pytest.mark.parametrize( + ("source_name", "source_id"), + [ + ("Aux", 1), + ("Spotify", 2), + ], +) +async def test_source_service( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_russound_client: AsyncMock, + source_name: str, + source_id: int, +) -> None: + """Test source service.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_SELECT_SOURCE, + {ATTR_ENTITY_ID: ENTITY_ID_ZONE_1, ATTR_INPUT_SOURCE: source_name}, + blocking=True, + ) + + mock_russound_client.controllers[1].zones[1].select_source.assert_called_once_with( + source_id + ) + + +async def test_invalid_source_service( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_russound_client: AsyncMock, +) -> None: + """Test source service with invalid source ID.""" + await 
setup_integration(hass, mock_config_entry) + + mock_russound_client.controllers[1].zones[ + 1 + ].select_source.side_effect = CommandError + + with pytest.raises( + HomeAssistantError, + match="Error executing async_select_source on entity media_player.mca_c5_backyard", + ): + await hass.services.async_call( + MP_DOMAIN, + SERVICE_SELECT_SOURCE, + {ATTR_ENTITY_ID: ENTITY_ID_ZONE_1, ATTR_INPUT_SOURCE: "Aux"}, + blocking=True, + ) + + +async def test_power_service( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_russound_client: AsyncMock, +) -> None: + """Test power service.""" + await setup_integration(hass, mock_config_entry) + + data = {ATTR_ENTITY_ID: ENTITY_ID_ZONE_1} + + await hass.services.async_call(MP_DOMAIN, SERVICE_TURN_ON, data, blocking=True) + + mock_russound_client.controllers[1].zones[1].zone_on.assert_called_once() + + await hass.services.async_call(MP_DOMAIN, SERVICE_TURN_OFF, data, blocking=True) + + mock_russound_client.controllers[1].zones[1].zone_off.assert_called_once() From c1cf0e23b2c11d7de581f7d3bf9a59d241db4236 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Wed, 18 Dec 2024 10:10:42 +0100 Subject: [PATCH 396/677] Lift SABnzbd to bronze quality scale (#133453) --- homeassistant/components/sabnzbd/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/sabnzbd/manifest.json b/homeassistant/components/sabnzbd/manifest.json index afc35a2340e..f1b8a17134b 100644 --- a/homeassistant/components/sabnzbd/manifest.json +++ b/homeassistant/components/sabnzbd/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/sabnzbd", "iot_class": "local_polling", "loggers": ["pysabnzbd"], + "quality_scale": "bronze", "requirements": ["pysabnzbd==1.1.1"] } From 413a578fdbc7986d005c53690a1aaca82a50acb5 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Wed, 18 Dec 2024 10:19:57 +0100 Subject: [PATCH 397/677] Bump pyOverkiz to 1.15.3 (#133458) --- 
homeassistant/components/overkiz/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/overkiz/manifest.json b/homeassistant/components/overkiz/manifest.json index 8c750aec6bd..9ab901d5005 100644 --- a/homeassistant/components/overkiz/manifest.json +++ b/homeassistant/components/overkiz/manifest.json @@ -20,7 +20,7 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"], - "requirements": ["pyoverkiz==1.15.0"], + "requirements": ["pyoverkiz==1.15.3"], "zeroconf": [ { "type": "_kizbox._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index 37504e5ec41..b01683cbf76 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2162,7 +2162,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.15.0 +pyoverkiz==1.15.3 # homeassistant.components.onewire pyownet==0.10.0.post1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 55bb0e6ac1f..5b339e061b1 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1755,7 +1755,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.15.0 +pyoverkiz==1.15.3 # homeassistant.components.onewire pyownet==0.10.0.post1 From 5fb5e933e2759d3e71b76deeab850fdd9986806f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Wed, 18 Dec 2024 09:20:14 +0000 Subject: [PATCH 398/677] Use a common base entity for Idasen Desk (#132496) Co-authored-by: Joost Lekkerkerker --- .../components/idasen_desk/__init__.py | 46 +++++-------------- .../components/idasen_desk/button.py | 35 ++++++-------- .../components/idasen_desk/coordinator.py | 10 ++-- homeassistant/components/idasen_desk/cover.py | 36 ++++----------- .../components/idasen_desk/entity.py | 34 ++++++++++++++ .../components/idasen_desk/quality_scale.yaml | 7 +-- 
.../components/idasen_desk/sensor.py | 32 +++---------- 7 files changed, 82 insertions(+), 118 deletions(-) create mode 100644 homeassistant/components/idasen_desk/entity.py diff --git a/homeassistant/components/idasen_desk/__init__.py b/homeassistant/components/idasen_desk/__init__.py index 56a377ac2df..1aacea91723 100644 --- a/homeassistant/components/idasen_desk/__init__.py +++ b/homeassistant/components/idasen_desk/__init__.py @@ -4,53 +4,31 @@ from __future__ import annotations import logging -from attr import dataclass from bleak.exc import BleakError from idasen_ha.errors import AuthFailedError from homeassistant.components import bluetooth from homeassistant.components.bluetooth.match import ADDRESS, BluetoothCallbackMatcher from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_NAME, - CONF_ADDRESS, - EVENT_HOMEASSISTANT_STOP, - Platform, -) +from homeassistant.const import CONF_ADDRESS, EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo -from .const import DOMAIN from .coordinator import IdasenDeskCoordinator PLATFORMS: list[Platform] = [Platform.BUTTON, Platform.COVER, Platform.SENSOR] _LOGGER = logging.getLogger(__name__) - -@dataclass -class DeskData: - """Data for the Idasen Desk integration.""" - - address: str - device_info: DeviceInfo - coordinator: IdasenDeskCoordinator +type IdasenDeskConfigEntry = ConfigEntry[IdasenDeskCoordinator] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: IdasenDeskConfigEntry) -> bool: """Set up IKEA Idasen from a config entry.""" address: str = entry.data[CONF_ADDRESS].upper() coordinator = IdasenDeskCoordinator(hass, _LOGGER, entry.title, address) - device_info = DeviceInfo( 
- name=entry.title, - connections={(dr.CONNECTION_BLUETOOTH, address)}, - ) - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = DeskData( - address, device_info, coordinator - ) + entry.runtime_data = coordinator try: if not await coordinator.async_connect(): @@ -89,18 +67,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def _async_update_listener( + hass: HomeAssistant, entry: IdasenDeskConfigEntry +) -> None: """Handle options update.""" - data: DeskData = hass.data[DOMAIN][entry.entry_id] - if entry.title != data.device_info[ATTR_NAME]: - await hass.config_entries.async_reload(entry.entry_id) + await hass.config_entries.async_reload(entry.entry_id) -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: IdasenDeskConfigEntry) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - data: DeskData = hass.data[DOMAIN].pop(entry.entry_id) - await data.coordinator.async_disconnect() - bluetooth.async_rediscover_address(hass, data.address) + coordinator = entry.runtime_data + await coordinator.async_disconnect() + bluetooth.async_rediscover_address(hass, coordinator.address) return unload_ok diff --git a/homeassistant/components/idasen_desk/button.py b/homeassistant/components/idasen_desk/button.py index 0de3125576d..cd7553da1ac 100644 --- a/homeassistant/components/idasen_desk/button.py +++ b/homeassistant/components/idasen_desk/button.py @@ -6,14 +6,12 @@ import logging from typing import Any, Final from homeassistant.components.button import ButtonEntity, ButtonEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import 
DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DeskData, IdasenDeskCoordinator -from .const import DOMAIN +from . import IdasenDeskConfigEntry, IdasenDeskCoordinator +from .entity import IdasenDeskEntity _LOGGER = logging.getLogger(__name__) @@ -45,43 +43,38 @@ BUTTONS: Final = [ async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: IdasenDeskConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set buttons for device.""" - data: DeskData = hass.data[DOMAIN][entry.entry_id] - async_add_entities( - IdasenDeskButton(data.address, data.device_info, data.coordinator, button) - for button in BUTTONS - ) + coordinator = entry.runtime_data + async_add_entities(IdasenDeskButton(coordinator, button) for button in BUTTONS) -class IdasenDeskButton(ButtonEntity): +class IdasenDeskButton(IdasenDeskEntity, ButtonEntity): """Defines a IdasenDesk button.""" entity_description: IdasenDeskButtonDescription - _attr_has_entity_name = True def __init__( self, - address: str, - device_info: DeviceInfo, coordinator: IdasenDeskCoordinator, description: IdasenDeskButtonDescription, ) -> None: """Initialize the IdasenDesk button entity.""" + super().__init__(f"{description.key}-{coordinator.address}", coordinator) self.entity_description = description - self._attr_unique_id = f"{description.key}-{address}" - self._attr_device_info = device_info - self._address = address - self._coordinator = coordinator - async def async_press(self) -> None: """Triggers the IdasenDesk button press service.""" _LOGGER.debug( "Trigger %s for %s", self.entity_description.key, - self._address, + self.coordinator.address, ) - await self.entity_description.press_action(self._coordinator)() + await self.entity_description.press_action(self.coordinator)() + + @property + def available(self) -> bool: + """Connect/disconnect buttons should always be available.""" + return True diff --git 
a/homeassistant/components/idasen_desk/coordinator.py b/homeassistant/components/idasen_desk/coordinator.py index 0661f2dede1..a84027a26c0 100644 --- a/homeassistant/components/idasen_desk/coordinator.py +++ b/homeassistant/components/idasen_desk/coordinator.py @@ -26,20 +26,20 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]): """Init IdasenDeskCoordinator.""" super().__init__(hass, logger, name=name) - self._address = address + self.address = address self._expected_connected = False self.desk = Desk(self.async_set_updated_data) async def async_connect(self) -> bool: """Connect to desk.""" - _LOGGER.debug("Trying to connect %s", self._address) + _LOGGER.debug("Trying to connect %s", self.address) self._expected_connected = True ble_device = bluetooth.async_ble_device_from_address( - self.hass, self._address, connectable=True + self.hass, self.address, connectable=True ) if ble_device is None: - _LOGGER.debug("No BLEDevice for %s", self._address) + _LOGGER.debug("No BLEDevice for %s", self.address) return False await self.desk.connect(ble_device) return True @@ -47,7 +47,7 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]): async def async_disconnect(self) -> None: """Disconnect from desk.""" self._expected_connected = False - _LOGGER.debug("Disconnecting from %s", self._address) + _LOGGER.debug("Disconnecting from %s", self.address) await self.desk.disconnect() async def async_connect_if_expected(self) -> None: diff --git a/homeassistant/components/idasen_desk/cover.py b/homeassistant/components/idasen_desk/cover.py index eb6bf5523de..95474ea8750 100644 --- a/homeassistant/components/idasen_desk/cover.py +++ b/homeassistant/components/idasen_desk/cover.py @@ -12,30 +12,25 @@ from homeassistant.components.cover import ( CoverEntity, CoverEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from 
homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import DeskData, IdasenDeskCoordinator -from .const import DOMAIN +from . import IdasenDeskConfigEntry, IdasenDeskCoordinator +from .entity import IdasenDeskEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: IdasenDeskConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the cover platform for Idasen Desk.""" - data: DeskData = hass.data[DOMAIN][entry.entry_id] - async_add_entities( - [IdasenDeskCover(data.address, data.device_info, data.coordinator)] - ) + coordinator = entry.runtime_data + async_add_entities([IdasenDeskCover(coordinator)]) -class IdasenDeskCover(CoordinatorEntity[IdasenDeskCoordinator], CoverEntity): +class IdasenDeskCover(IdasenDeskEntity, CoverEntity): """Representation of Idasen Desk device.""" _attr_device_class = CoverDeviceClass.DAMPER @@ -45,29 +40,14 @@ class IdasenDeskCover(CoordinatorEntity[IdasenDeskCoordinator], CoverEntity): | CoverEntityFeature.STOP | CoverEntityFeature.SET_POSITION ) - _attr_has_entity_name = True _attr_name = None _attr_translation_key = "desk" - def __init__( - self, - address: str, - device_info: DeviceInfo, - coordinator: IdasenDeskCoordinator, - ) -> None: + def __init__(self, coordinator: IdasenDeskCoordinator) -> None: """Initialize an Idasen Desk cover.""" - super().__init__(coordinator) - self._desk = coordinator.desk - self._attr_unique_id = address - self._attr_device_info = device_info - + super().__init__(coordinator.address, coordinator) self._attr_current_cover_position = self._desk.height_percent - @property - def available(self) -> bool: - """Return True if entity is available.""" - return super().available and self._desk.is_connected is True - @property def is_closed(self) -> bool: """Return if the cover is closed.""" diff --git 
a/homeassistant/components/idasen_desk/entity.py b/homeassistant/components/idasen_desk/entity.py new file mode 100644 index 00000000000..bda7afd528c --- /dev/null +++ b/homeassistant/components/idasen_desk/entity.py @@ -0,0 +1,34 @@ +"""Base entity for Idasen Desk.""" + +from __future__ import annotations + +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . import IdasenDeskCoordinator + + +class IdasenDeskEntity(CoordinatorEntity[IdasenDeskCoordinator]): + """IdasenDesk sensor.""" + + _attr_has_entity_name = True + + def __init__( + self, + unique_id: str, + coordinator: IdasenDeskCoordinator, + ) -> None: + """Initialize the IdasenDesk sensor entity.""" + super().__init__(coordinator) + + self._attr_unique_id = unique_id + self._attr_device_info = dr.DeviceInfo( + manufacturer="LINAK", + connections={(dr.CONNECTION_BLUETOOTH, coordinator.address)}, + ) + self._desk = coordinator.desk + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return super().available and self._desk.is_connected is True diff --git a/homeassistant/components/idasen_desk/quality_scale.yaml b/homeassistant/components/idasen_desk/quality_scale.yaml index 1b9ec8cd810..1908178ec15 100644 --- a/homeassistant/components/idasen_desk/quality_scale.yaml +++ b/homeassistant/components/idasen_desk/quality_scale.yaml @@ -9,10 +9,7 @@ rules: comment: | This integration does not use polling. brands: done - common-modules: - status: todo - comment: | - The cover and sensor entities could move common initialization to a base entity class. 
+ common-modules: done config-flow-test-coverage: status: todo comment: | @@ -33,7 +30,7 @@ rules: entity-event-setup: done entity-unique-id: done has-entity-name: done - runtime-data: todo + runtime-data: done test-before-configure: done test-before-setup: done unique-config-entry: done diff --git a/homeassistant/components/idasen_desk/sensor.py b/homeassistant/components/idasen_desk/sensor.py index 8ed85d21a34..d4f629b85a8 100644 --- a/homeassistant/components/idasen_desk/sensor.py +++ b/homeassistant/components/idasen_desk/sensor.py @@ -6,7 +6,6 @@ from collections.abc import Callable from dataclasses import dataclass from typing import Any -from homeassistant import config_entries from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, @@ -15,12 +14,10 @@ from homeassistant.components.sensor import ( ) from homeassistant.const import UnitOfLength from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import DeskData, IdasenDeskCoordinator -from .const import DOMAIN +from . 
import IdasenDeskConfigEntry, IdasenDeskCoordinator +from .entity import IdasenDeskEntity @dataclass(frozen=True, kw_only=True) @@ -46,51 +43,36 @@ SENSORS = ( async def async_setup_entry( hass: HomeAssistant, - entry: config_entries.ConfigEntry, + entry: IdasenDeskConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Idasen Desk sensors.""" - data: DeskData = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( - IdasenDeskSensor( - data.address, data.device_info, data.coordinator, sensor_description - ) + IdasenDeskSensor(coordinator, sensor_description) for sensor_description in SENSORS ) -class IdasenDeskSensor(CoordinatorEntity[IdasenDeskCoordinator], SensorEntity): +class IdasenDeskSensor(IdasenDeskEntity, SensorEntity): """IdasenDesk sensor.""" entity_description: IdasenDeskSensorDescription - _attr_has_entity_name = True def __init__( self, - address: str, - device_info: DeviceInfo, coordinator: IdasenDeskCoordinator, description: IdasenDeskSensorDescription, ) -> None: """Initialize the IdasenDesk sensor entity.""" - super().__init__(coordinator) + super().__init__(f"{description.key}-{coordinator.address}", coordinator) self.entity_description = description - self._attr_unique_id = f"{description.key}-{address}" - self._attr_device_info = device_info - self._address = address - self._desk = coordinator.desk - async def async_added_to_hass(self) -> None: """When entity is added to hass.""" await super().async_added_to_hass() self._update_native_value() - @property - def available(self) -> bool: - """Return True if entity is available.""" - return super().available and self._desk.is_connected is True - @callback def _handle_coordinator_update(self, *args: Any) -> None: """Handle data update.""" From 39d781905de5bdce7325092427fc81969b57d4e2 Mon Sep 17 00:00:00 2001 From: Tomer Shemesh Date: Wed, 18 Dec 2024 04:21:37 -0500 Subject: [PATCH 399/677] Add ssdp discovery to Onkyo (#131066) --- CODEOWNERS | 4 
+- homeassistant/components/onkyo/config_flow.py | 45 ++++++ homeassistant/components/onkyo/manifest.json | 42 ++++- homeassistant/generated/ssdp.py | 38 +++++ tests/components/onkyo/test_config_flow.py | 147 ++++++++++++++++++ 5 files changed, 272 insertions(+), 4 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index f1c6aa4aea5..8effcc49336 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1066,8 +1066,8 @@ build.json @home-assistant/supervisor /tests/components/ondilo_ico/ @JeromeHXP /homeassistant/components/onewire/ @garbled1 @epenet /tests/components/onewire/ @garbled1 @epenet -/homeassistant/components/onkyo/ @arturpragacz -/tests/components/onkyo/ @arturpragacz +/homeassistant/components/onkyo/ @arturpragacz @eclair4151 +/tests/components/onkyo/ @arturpragacz @eclair4151 /homeassistant/components/onvif/ @hunterjm /tests/components/onvif/ @hunterjm /homeassistant/components/open_meteo/ @frenck diff --git a/homeassistant/components/onkyo/config_flow.py b/homeassistant/components/onkyo/config_flow.py index a8ced6fae64..a484b3aaa04 100644 --- a/homeassistant/components/onkyo/config_flow.py +++ b/homeassistant/components/onkyo/config_flow.py @@ -4,7 +4,9 @@ import logging from typing import Any import voluptuous as vol +from yarl import URL +from homeassistant.components import ssdp from homeassistant.config_entries import ( SOURCE_RECONFIGURE, ConfigEntry, @@ -165,6 +167,49 @@ class OnkyoConfigFlow(ConfigFlow, domain=DOMAIN): ), ) + async def async_step_ssdp( + self, discovery_info: ssdp.SsdpServiceInfo + ) -> ConfigFlowResult: + """Handle flow initialized by SSDP discovery.""" + _LOGGER.debug("Config flow start ssdp: %s", discovery_info) + + if udn := discovery_info.ssdp_udn: + udn_parts = udn.split(":") + if len(udn_parts) == 2: + uuid = udn_parts[1] + last_uuid_section = uuid.split("-")[-1].upper() + await self.async_set_unique_id(last_uuid_section) + self._abort_if_unique_id_configured() + + if discovery_info.ssdp_location is None: + _LOGGER.error("SSDP 
location is None") + return self.async_abort(reason="unknown") + + host = URL(discovery_info.ssdp_location).host + + if host is None: + _LOGGER.error("SSDP host is None") + return self.async_abort(reason="unknown") + + try: + info = await async_interview(host) + except OSError: + _LOGGER.exception("Unexpected exception interviewing host %s", host) + return self.async_abort(reason="unknown") + + if info is None: + _LOGGER.debug("SSDP eiscp is None: %s", host) + return self.async_abort(reason="cannot_connect") + + await self.async_set_unique_id(info.identifier) + self._abort_if_unique_id_configured(updates={CONF_HOST: info.host}) + + self._receiver_info = info + + title_string = f"{info.model_name} ({info.host})" + self.context["title_placeholders"] = {"name": title_string} + return await self.async_step_configure_receiver() + async def async_step_configure_receiver( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/onkyo/manifest.json b/homeassistant/components/onkyo/manifest.json index 0e75404b3eb..6f37fb61b44 100644 --- a/homeassistant/components/onkyo/manifest.json +++ b/homeassistant/components/onkyo/manifest.json @@ -1,11 +1,49 @@ { "domain": "onkyo", "name": "Onkyo", - "codeowners": ["@arturpragacz"], + "codeowners": ["@arturpragacz", "@eclair4151"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/onkyo", "integration_type": "device", "iot_class": "local_push", "loggers": ["pyeiscp"], - "requirements": ["pyeiscp==0.0.7"] + "requirements": ["pyeiscp==0.0.7"], + "ssdp": [ + { + "manufacturer": "ONKYO", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1" + }, + { + "manufacturer": "ONKYO", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:2" + }, + { + "manufacturer": "ONKYO", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:3" + }, + { + "manufacturer": "Onkyo & Pioneer Corporation", + "deviceType": 
"urn:schemas-upnp-org:device:MediaRenderer:1" + }, + { + "manufacturer": "Onkyo & Pioneer Corporation", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:2" + }, + { + "manufacturer": "Onkyo & Pioneer Corporation", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:3" + }, + { + "manufacturer": "Pioneer", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1" + }, + { + "manufacturer": "Pioneer", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:2" + }, + { + "manufacturer": "Pioneer", + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:3" + } + ] } diff --git a/homeassistant/generated/ssdp.py b/homeassistant/generated/ssdp.py index 9ed65bab868..89d1aa30cb8 100644 --- a/homeassistant/generated/ssdp.py +++ b/homeassistant/generated/ssdp.py @@ -224,6 +224,44 @@ SSDP = { "manufacturer": "The OctoPrint Project", }, ], + "onkyo": [ + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1", + "manufacturer": "ONKYO", + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:2", + "manufacturer": "ONKYO", + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:3", + "manufacturer": "ONKYO", + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1", + "manufacturer": "Onkyo & Pioneer Corporation", + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:2", + "manufacturer": "Onkyo & Pioneer Corporation", + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:3", + "manufacturer": "Onkyo & Pioneer Corporation", + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1", + "manufacturer": "Pioneer", + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:2", + "manufacturer": "Pioneer", + }, + { + "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:3", + "manufacturer": "Pioneer", + }, + ], "openhome": [ { "st": "urn:av-openhome-org:service:Product:1", diff --git a/tests/components/onkyo/test_config_flow.py 
b/tests/components/onkyo/test_config_flow.py index 1ee0bfdf9c5..f619127d9b9 100644 --- a/tests/components/onkyo/test_config_flow.py +++ b/tests/components/onkyo/test_config_flow.py @@ -6,6 +6,7 @@ from unittest.mock import patch import pytest from homeassistant import config_entries +from homeassistant.components import ssdp from homeassistant.components.onkyo import InputSource from homeassistant.components.onkyo.config_flow import OnkyoConfigFlow from homeassistant.components.onkyo.const import ( @@ -83,6 +84,35 @@ async def test_manual_invalid_host(hass: HomeAssistant, stub_mock_discovery) -> assert host_result["errors"]["base"] == "cannot_connect" +async def test_ssdp_discovery_already_configured( + hass: HomeAssistant, default_mock_discovery +) -> None: + """Test SSDP discovery with already configured device.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data={CONF_HOST: "192.168.1.100"}, + unique_id="id1", + ) + config_entry.add_to_hass(hass) + + discovery_info = ssdp.SsdpServiceInfo( + ssdp_location="http://192.168.1.100:8080", + upnp={ssdp.ATTR_UPNP_FRIENDLY_NAME: "Onkyo Receiver"}, + ssdp_usn="uuid:mock_usn", + ssdp_udn="uuid:00000000-0000-0000-0000-000000000000", + ssdp_st="mock_st", + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_SSDP}, + data=discovery_info, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + async def test_manual_valid_host_unexpected_error( hass: HomeAssistant, empty_mock_discovery ) -> None: @@ -198,6 +228,123 @@ async def test_discovery_with_one_selected(hass: HomeAssistant) -> None: assert select_result["description_placeholders"]["name"] == "type 42 (host 42)" +async def test_ssdp_discovery_success( + hass: HomeAssistant, default_mock_discovery +) -> None: + """Test SSDP discovery with valid host.""" + discovery_info = ssdp.SsdpServiceInfo( + ssdp_location="http://192.168.1.100:8080", + 
upnp={ssdp.ATTR_UPNP_FRIENDLY_NAME: "Onkyo Receiver"}, + ssdp_usn="uuid:mock_usn", + ssdp_udn="uuid:00000000-0000-0000-0000-000000000000", + ssdp_st="mock_st", + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_SSDP}, + data=discovery_info, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "configure_receiver" + + select_result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"volume_resolution": 200, "input_sources": ["TV"]}, + ) + + assert select_result["type"] is FlowResultType.CREATE_ENTRY + assert select_result["data"]["host"] == "192.168.1.100" + assert select_result["result"].unique_id == "id1" + + +async def test_ssdp_discovery_host_info_error(hass: HomeAssistant) -> None: + """Test SSDP discovery with host info error.""" + discovery_info = ssdp.SsdpServiceInfo( + ssdp_location="http://192.168.1.100:8080", + upnp={ssdp.ATTR_UPNP_FRIENDLY_NAME: "Onkyo Receiver"}, + ssdp_usn="uuid:mock_usn", + ssdp_st="mock_st", + ) + + with patch( + "homeassistant.components.onkyo.receiver.pyeiscp.Connection.discover", + side_effect=OSError, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_SSDP}, + data=discovery_info, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unknown" + + +async def test_ssdp_discovery_host_none_info( + hass: HomeAssistant, stub_mock_discovery +) -> None: + """Test SSDP discovery with host info error.""" + discovery_info = ssdp.SsdpServiceInfo( + ssdp_location="http://192.168.1.100:8080", + upnp={ssdp.ATTR_UPNP_FRIENDLY_NAME: "Onkyo Receiver"}, + ssdp_usn="uuid:mock_usn", + ssdp_st="mock_st", + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_SSDP}, + data=discovery_info, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == 
"cannot_connect" + + +async def test_ssdp_discovery_no_location( + hass: HomeAssistant, default_mock_discovery +) -> None: + """Test SSDP discovery with no location.""" + discovery_info = ssdp.SsdpServiceInfo( + ssdp_location=None, + upnp={ssdp.ATTR_UPNP_FRIENDLY_NAME: "Onkyo Receiver"}, + ssdp_usn="uuid:mock_usn", + ssdp_st="mock_st", + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_SSDP}, + data=discovery_info, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unknown" + + +async def test_ssdp_discovery_no_host( + hass: HomeAssistant, default_mock_discovery +) -> None: + """Test SSDP discovery with no host.""" + discovery_info = ssdp.SsdpServiceInfo( + ssdp_location="http://", + upnp={ssdp.ATTR_UPNP_FRIENDLY_NAME: "Onkyo Receiver"}, + ssdp_usn="uuid:mock_usn", + ssdp_st="mock_st", + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_SSDP}, + data=discovery_info, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unknown" + + async def test_configure_empty_source_list( hass: HomeAssistant, default_mock_discovery ) -> None: From a2be5a383c1bc0811828a92b2d600d0b0138e6b7 Mon Sep 17 00:00:00 2001 From: Ron Weikamp <15732230+ronweikamp@users.noreply.github.com> Date: Wed, 18 Dec 2024 10:41:46 +0100 Subject: [PATCH 400/677] Bugfix: also schedule time based integration when source is 0 (#133438) * Bugfix also schedule time based integration when source is 0 * Update tests/components/integration/test_sensor.py Co-authored-by: Diogo Gomes * Improve comment in test. Remove redundant assertion. 
--------- Co-authored-by: Diogo Gomes --- .../components/integration/sensor.py | 2 +- tests/components/integration/test_sensor.py | 33 +++++++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/integration/sensor.py b/homeassistant/components/integration/sensor.py index a053e5cea5c..27aa74d0785 100644 --- a/homeassistant/components/integration/sensor.py +++ b/homeassistant/components/integration/sensor.py @@ -576,7 +576,7 @@ class IntegrationSensor(RestoreSensor): if ( self._max_sub_interval is not None and source_state is not None - and (source_state_dec := _decimal_state(source_state.state)) + and (source_state_dec := _decimal_state(source_state.state)) is not None ): @callback diff --git a/tests/components/integration/test_sensor.py b/tests/components/integration/test_sensor.py index 974c8bb8691..07390cd9571 100644 --- a/tests/components/integration/test_sensor.py +++ b/tests/components/integration/test_sensor.py @@ -843,6 +843,39 @@ async def test_on_valid_source_expect_update_on_time( assert float(state.state) < 1.8 +async def test_on_0_source_expect_0_and_update_when_source_gets_positive( + hass: HomeAssistant, +) -> None: + """Test whether time based integration updates the integral on a valid zero source.""" + start_time = dt_util.utcnow() + + with freeze_time(start_time) as freezer: + await _setup_integral_sensor(hass, max_sub_interval=DEFAULT_MAX_SUB_INTERVAL) + await _update_source_sensor(hass, 0) + await hass.async_block_till_done() + + # wait one minute and one second + freezer.tick(61) + async_fire_time_changed(hass, dt_util.now()) + await hass.async_block_till_done() + + state = hass.states.get("sensor.integration") + + assert condition.async_numeric_state(hass, state) is True + assert float(state.state) == 0 # integral is 0 after integration of 0 + + # wait one second and update state + freezer.tick(1) + async_fire_time_changed(hass, dt_util.now()) + await _update_source_sensor(hass, 100) + await 
hass.async_block_till_done() + + state = hass.states.get("sensor.integration") + + # approx 100*1/3600 (right method after 1 second since last integration) + assert 0.027 < float(state.state) < 0.029 + + async def test_on_unvailable_source_expect_no_update_on_time( hass: HomeAssistant, ) -> None: From 8b8c4099162b9cc5b5af984a89d89077e2099bc2 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 18 Dec 2024 10:44:19 +0100 Subject: [PATCH 401/677] Fix test-before-setup IQS check (#133467) --- .../hassfest/quality_scale_validation/test_before_setup.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/script/hassfest/quality_scale_validation/test_before_setup.py b/script/hassfest/quality_scale_validation/test_before_setup.py index 5f21a9d2458..1ac0d3d8e0b 100644 --- a/script/hassfest/quality_scale_validation/test_before_setup.py +++ b/script/hassfest/quality_scale_validation/test_before_setup.py @@ -17,13 +17,20 @@ _VALID_EXCEPTIONS = { def _get_exception_name(expression: ast.expr) -> str: """Get the name of the exception being raised.""" + if expression is None: + # Bare raise + return None + if isinstance(expression, ast.Name): + # Raise Exception return expression.id if isinstance(expression, ast.Call): + # Raise Exception() return _get_exception_name(expression.func) if isinstance(expression, ast.Attribute): + # Raise namespace.??? 
return _get_exception_name(expression.value) raise AssertionError( From a6520d2627f61df44b2aa15b1a72b2fa31c850dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=2E=20Diego=20Rodr=C3=ADguez=20Royo?= Date: Wed, 18 Dec 2024 10:52:45 +0100 Subject: [PATCH 402/677] Handle Home Connect error at diagnostics (#131644) --- .../components/home_connect/diagnostics.py | 9 +++- .../snapshots/test_diagnostics.ambr | 14 ++++++ .../home_connect/test_diagnostics.py | 48 +++++++++++++++++-- 3 files changed, 66 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/home_connect/diagnostics.py b/homeassistant/components/home_connect/diagnostics.py index d2505853d23..e095bc503ab 100644 --- a/homeassistant/components/home_connect/diagnostics.py +++ b/homeassistant/components/home_connect/diagnostics.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any -from homeconnect.api import HomeConnectAppliance +from homeconnect.api import HomeConnectAppliance, HomeConnectError from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntry @@ -14,9 +14,14 @@ from .api import HomeConnectDevice def _generate_appliance_diagnostics(appliance: HomeConnectAppliance) -> dict[str, Any]: + try: + programs = appliance.get_programs_available() + except HomeConnectError: + programs = None return { + "connected": appliance.connected, "status": appliance.status, - "programs": appliance.get_programs_available(), + "programs": programs, } diff --git a/tests/components/home_connect/snapshots/test_diagnostics.ambr b/tests/components/home_connect/snapshots/test_diagnostics.ambr index 99f10fe2847..f3131eac52f 100644 --- a/tests/components/home_connect/snapshots/test_diagnostics.ambr +++ b/tests/components/home_connect/snapshots/test_diagnostics.ambr @@ -2,6 +2,7 @@ # name: test_async_get_config_entry_diagnostics dict({ 'BOSCH-000000000-000000000000': dict({ + 'connected': True, 'programs': list([ ]), 'status': dict({ @@ -23,6 +24,7 @@ }), }), 
'BOSCH-HCS000000-D00000000001': dict({ + 'connected': True, 'programs': list([ 'LaundryCare.WasherDryer.Program.Mix', 'LaundryCare.Washer.Option.Temperature', @@ -46,6 +48,7 @@ }), }), 'BOSCH-HCS000000-D00000000002': dict({ + 'connected': True, 'programs': list([ ]), 'status': dict({ @@ -67,6 +70,7 @@ }), }), 'BOSCH-HCS000000-D00000000003': dict({ + 'connected': True, 'programs': list([ ]), 'status': dict({ @@ -88,6 +92,7 @@ }), }), 'BOSCH-HCS000000-D00000000004': dict({ + 'connected': True, 'programs': list([ ]), 'status': dict({ @@ -144,6 +149,7 @@ }), }), 'BOSCH-HCS000000-D00000000005': dict({ + 'connected': True, 'programs': list([ ]), 'status': dict({ @@ -165,6 +171,7 @@ }), }), 'BOSCH-HCS000000-D00000000006': dict({ + 'connected': True, 'programs': list([ ]), 'status': dict({ @@ -186,6 +193,7 @@ }), }), 'BOSCH-HCS01OVN1-43E0065FE245': dict({ + 'connected': True, 'programs': list([ 'Cooking.Oven.Program.HeatingMode.HotAir', 'Cooking.Oven.Program.HeatingMode.TopBottomHeating', @@ -217,6 +225,7 @@ }), }), 'BOSCH-HCS04DYR1-831694AE3C5A': dict({ + 'connected': True, 'programs': list([ 'LaundryCare.Dryer.Program.Cotton', 'LaundryCare.Dryer.Program.Synthetic', @@ -241,6 +250,7 @@ }), }), 'BOSCH-HCS06COM1-D70390681C2C': dict({ + 'connected': True, 'programs': list([ 'ConsumerProducts.CoffeeMaker.Program.Beverage.Espresso', 'ConsumerProducts.CoffeeMaker.Program.Beverage.EspressoMacchiato', @@ -268,6 +278,7 @@ }), }), 'SIEMENS-HCS02DWH1-6BE58C26DCC1': dict({ + 'connected': True, 'programs': list([ 'Dishcare.Dishwasher.Program.Auto1', 'Dishcare.Dishwasher.Program.Auto2', @@ -319,6 +330,7 @@ }), }), 'SIEMENS-HCS03WCH1-7BC6383CF794': dict({ + 'connected': True, 'programs': list([ 'LaundryCare.Washer.Program.Cotton', 'LaundryCare.Washer.Program.EasyCare', @@ -356,6 +368,7 @@ }), }), 'SIEMENS-HCS05FRF1-304F4F9E541D': dict({ + 'connected': True, 'programs': list([ ]), 'status': dict({ @@ -415,6 +428,7 @@ # --- # name: test_async_get_device_diagnostics dict({ + 'connected': 
True, 'programs': list([ 'Dishcare.Dishwasher.Program.Auto1', 'Dishcare.Dishwasher.Program.Auto2', diff --git a/tests/components/home_connect/test_diagnostics.py b/tests/components/home_connect/test_diagnostics.py index d0bc5e77735..f2db6e2b67a 100644 --- a/tests/components/home_connect/test_diagnostics.py +++ b/tests/components/home_connect/test_diagnostics.py @@ -1,8 +1,9 @@ """Test diagnostics for Home Connect.""" from collections.abc import Awaitable, Callable -from unittest.mock import MagicMock +from unittest.mock import MagicMock, Mock +from homeconnect.api import HomeConnectError import pytest from syrupy import SnapshotAssertion @@ -63,14 +64,13 @@ async def test_async_get_device_diagnostics( @pytest.mark.usefixtures("bypass_throttle") -async def test_async_device_diagnostics_exceptions( +async def test_async_device_diagnostics_not_found( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], setup_credentials: None, get_appliances: MagicMock, device_registry: dr.DeviceRegistry, - snapshot: SnapshotAssertion, ) -> None: """Test device config entry diagnostics.""" get_appliances.side_effect = get_all_appliances @@ -85,3 +85,45 @@ async def test_async_device_diagnostics_exceptions( with pytest.raises(ValueError): await async_get_device_diagnostics(hass, config_entry, device) + + +@pytest.mark.parametrize( + ("api_error", "expected_connection_status"), + [ + (HomeConnectError(), "unknown"), + ( + HomeConnectError( + { + "key": "SDK.Error.HomeAppliance.Connection.Initialization.Failed", + } + ), + "offline", + ), + ], +) +@pytest.mark.usefixtures("bypass_throttle") +async def test_async_device_diagnostics_api_error( + api_error: HomeConnectError, + expected_connection_status: str, + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, + appliance: Mock, + device_registry: dr.DeviceRegistry, +) -> None: + 
"""Test device config entry diagnostics.""" + appliance.get_programs_available.side_effect = api_error + get_appliances.return_value = [appliance] + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + device = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, appliance.haId)}, + ) + + diagnostics = await async_get_device_diagnostics(hass, config_entry, device) + assert diagnostics["programs"] is None From 90208d2eb1da153fd3ada4de5465bce4a70ef9d1 Mon Sep 17 00:00:00 2001 From: dotvav Date: Wed, 18 Dec 2024 10:58:25 +0100 Subject: [PATCH 403/677] Bump pypalazzetti to 0.1.15 (#133433) --- homeassistant/components/palazzetti/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/palazzetti/manifest.json b/homeassistant/components/palazzetti/manifest.json index 05a5d260b50..70e58507159 100644 --- a/homeassistant/components/palazzetti/manifest.json +++ b/homeassistant/components/palazzetti/manifest.json @@ -15,5 +15,5 @@ "documentation": "https://www.home-assistant.io/integrations/palazzetti", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["pypalazzetti==0.1.14"] + "requirements": ["pypalazzetti==0.1.15"] } diff --git a/requirements_all.txt b/requirements_all.txt index b01683cbf76..a6316379d8f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2168,7 +2168,7 @@ pyoverkiz==1.15.3 pyownet==0.10.0.post1 # homeassistant.components.palazzetti -pypalazzetti==0.1.14 +pypalazzetti==0.1.15 # homeassistant.components.elv pypca==0.0.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5b339e061b1..33e7327568e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1761,7 +1761,7 @@ pyoverkiz==1.15.3 pyownet==0.10.0.post1 # 
homeassistant.components.palazzetti -pypalazzetti==0.1.14 +pypalazzetti==0.1.15 # homeassistant.components.lcn pypck==0.7.24 From 869a0d7abc6230e1b8d0609681b459947a2c3bed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Joakim=20S=C3=B8rensen?= Date: Wed, 18 Dec 2024 11:01:38 +0100 Subject: [PATCH 404/677] Add name to cloud connection info response (#133468) --- homeassistant/components/cloud/client.py | 1 + tests/components/cloud/test_client.py | 1 + 2 files changed, 2 insertions(+) diff --git a/homeassistant/components/cloud/client.py b/homeassistant/components/cloud/client.py index ee46fa42125..ea3d992e8f7 100644 --- a/homeassistant/components/cloud/client.py +++ b/homeassistant/components/cloud/client.py @@ -306,6 +306,7 @@ class CloudClient(Interface): }, "version": HA_VERSION, "instance_id": self.prefs.instance_id, + "name": self._hass.config.location_name, } async def async_alexa_message(self, payload: dict[Any, Any]) -> dict[Any, Any]: diff --git a/tests/components/cloud/test_client.py b/tests/components/cloud/test_client.py index 43eccc5ef9c..52457fe558c 100644 --- a/tests/components/cloud/test_client.py +++ b/tests/components/cloud/test_client.py @@ -441,6 +441,7 @@ async def test_cloud_connection_info(hass: HomeAssistant) -> None: assert response == { "instance_id": "12345678901234567890", + "name": "test home", "remote": { "alias": None, "can_enable": True, From fa0e54e658975b3559656828b6e2464414000f1a Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Wed, 18 Dec 2024 11:05:52 +0100 Subject: [PATCH 405/677] Don't raise Overkiz user flow unique_id check (#133471) --- homeassistant/components/overkiz/config_flow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/overkiz/config_flow.py b/homeassistant/components/overkiz/config_flow.py index af7e277d928..9a94c30d95d 100644 --- a/homeassistant/components/overkiz/config_flow.py +++ b/homeassistant/components/overkiz/config_flow.py @@ -76,7 +76,7 @@ class 
OverkizConfigFlow(ConfigFlow, domain=DOMAIN): for gateway in gateways: if is_overkiz_gateway(gateway.id): gateway_id = gateway.id - await self.async_set_unique_id(gateway_id) + await self.async_set_unique_id(gateway_id, raise_on_progress=False) return user_input From 05b0c56191aeb31b84361142790a6c1abbef9176 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Wed, 18 Dec 2024 11:22:22 +0100 Subject: [PATCH 406/677] Use enum instead of string for button entities key in Overkiz (#133472) --- homeassistant/components/overkiz/button.py | 24 ++++++++++++++-------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/overkiz/button.py b/homeassistant/components/overkiz/button.py index 5a1116aeeb5..fcaa0b743dc 100644 --- a/homeassistant/components/overkiz/button.py +++ b/homeassistant/components/overkiz/button.py @@ -28,41 +28,47 @@ class OverkizButtonDescription(ButtonEntityDescription): BUTTON_DESCRIPTIONS: list[OverkizButtonDescription] = [ # My Position (cover, light) OverkizButtonDescription( - key="my", + key=OverkizCommand.MY, name="My position", icon="mdi:star", ), # Identify OverkizButtonDescription( - key="identify", # startIdentify and identify are reversed... Swap this when fixed in API. + key=OverkizCommand.IDENTIFY, # startIdentify and identify are reversed... Swap this when fixed in API. name="Start identify", icon="mdi:human-greeting-variant", entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), OverkizButtonDescription( - key="stopIdentify", + key=OverkizCommand.STOP_IDENTIFY, name="Stop identify", icon="mdi:human-greeting-variant", entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), OverkizButtonDescription( - key="startIdentify", # startIdentify and identify are reversed... Swap this when fixed in API. + key=OverkizCommand.START_IDENTIFY, # startIdentify and identify are reversed... Swap this when fixed in API. 
name="Identify", icon="mdi:human-greeting-variant", entity_category=EntityCategory.DIAGNOSTIC, ), # RTDIndoorSiren / RTDOutdoorSiren - OverkizButtonDescription(key="dingDong", name="Ding dong", icon="mdi:bell-ring"), - OverkizButtonDescription(key="bip", name="Bip", icon="mdi:bell-ring"), OverkizButtonDescription( - key="fastBipSequence", name="Fast bip sequence", icon="mdi:bell-ring" + key=OverkizCommand.DING_DONG, name="Ding dong", icon="mdi:bell-ring" + ), + OverkizButtonDescription(key=OverkizCommand.BIP, name="Bip", icon="mdi:bell-ring"), + OverkizButtonDescription( + key=OverkizCommand.FAST_BIP_SEQUENCE, + name="Fast bip sequence", + icon="mdi:bell-ring", + ), + OverkizButtonDescription( + key=OverkizCommand.RING, name="Ring", icon="mdi:bell-ring" ), - OverkizButtonDescription(key="ring", name="Ring", icon="mdi:bell-ring"), # DynamicScreen (ogp:blind) uses goToAlias (id 1: favorite1) instead of 'my' OverkizButtonDescription( - key="goToAlias", + key=OverkizCommand.GO_TO_ALIAS, press_args="1", name="My position", icon="mdi:star", From 7730f423b38c621c2d2c1665328b0a9907fa9504 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Wed, 18 Dec 2024 11:22:32 +0100 Subject: [PATCH 407/677] Add identify device class in Overkiz (#133474) --- homeassistant/components/overkiz/button.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/overkiz/button.py b/homeassistant/components/overkiz/button.py index fcaa0b743dc..a39c236725a 100644 --- a/homeassistant/components/overkiz/button.py +++ b/homeassistant/components/overkiz/button.py @@ -7,7 +7,11 @@ from dataclasses import dataclass from pyoverkiz.enums import OverkizCommand from pyoverkiz.types import StateType as OverkizStateType -from homeassistant.components.button import ButtonEntity, ButtonEntityDescription +from homeassistant.components.button import ( + ButtonDeviceClass, + ButtonEntity, + ButtonEntityDescription, +) from homeassistant.config_entries import ConfigEntry 
from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant @@ -52,6 +56,7 @@ BUTTON_DESCRIPTIONS: list[OverkizButtonDescription] = [ name="Identify", icon="mdi:human-greeting-variant", entity_category=EntityCategory.DIAGNOSTIC, + device_class=ButtonDeviceClass.IDENTIFY, ), # RTDIndoorSiren / RTDOutdoorSiren OverkizButtonDescription( From 992afc4cd37fcdf155602c212ea9349085ffa562 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 18 Dec 2024 11:27:07 +0100 Subject: [PATCH 408/677] Set the with_strategy_settings to None for unknown backups (#133466) --- homeassistant/components/backup/manager.py | 6 +-- .../backup/snapshots/test_backup.ambr | 2 +- .../backup/snapshots/test_websocket.ambr | 42 +++++++++---------- tests/components/cloud/test_backup.py | 4 +- tests/components/hassio/test_backup.py | 4 +- tests/components/kitchen_sink/test_backup.py | 2 +- 6 files changed, 30 insertions(+), 30 deletions(-) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index d6abc299317..a9bce8cb03d 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -60,7 +60,7 @@ class ManagerBackup(AgentBackup): agent_ids: list[str] failed_agent_ids: list[str] - with_strategy_settings: bool + with_strategy_settings: bool | None @dataclass(frozen=True, kw_only=True, slots=True) @@ -448,7 +448,7 @@ class BackupManager: with_strategy_settings = known_backup.with_strategy_settings else: failed_agent_ids = [] - with_strategy_settings = False + with_strategy_settings = None backups[backup_id] = ManagerBackup( agent_ids=[], addons=agent_backup.addons, @@ -497,7 +497,7 @@ class BackupManager: with_strategy_settings = known_backup.with_strategy_settings else: failed_agent_ids = [] - with_strategy_settings = False + with_strategy_settings = None backup = ManagerBackup( agent_ids=[], addons=result.addons, diff --git 
a/tests/components/backup/snapshots/test_backup.ambr b/tests/components/backup/snapshots/test_backup.ambr index b350ff680ee..9ef865955fe 100644 --- a/tests/components/backup/snapshots/test_backup.ambr +++ b/tests/components/backup/snapshots/test_backup.ambr @@ -78,7 +78,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index 8bd4e2817b2..f43a7ed7a2c 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -1539,7 +1539,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -1607,7 +1607,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -1660,7 +1660,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -1697,7 +1697,7 @@ 'name': 'Test 2', 'protected': False, 'size': 1, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -1745,7 +1745,7 @@ 'name': 'Test 2', 'protected': False, 'size': 1, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -1788,7 +1788,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -1841,7 +1841,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 
'last_attempted_strategy_backup': None, @@ -1895,7 +1895,7 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -2003,7 +2003,7 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -2056,7 +2056,7 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -2109,7 +2109,7 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -2216,7 +2216,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), }), 'success': True, @@ -2254,7 +2254,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), }), 'success': True, @@ -2305,7 +2305,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), }), 'success': True, @@ -2344,7 +2344,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), }), 'success': True, @@ -2607,7 +2607,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -2649,7 +2649,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -2692,7 +2692,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, 
@@ -2729,7 +2729,7 @@ 'name': 'Test 2', 'protected': False, 'size': 1, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), dict({ 'addons': list([ @@ -2756,7 +2756,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, @@ -2799,7 +2799,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': False, + 'with_strategy_settings': None, }), ]), 'last_attempted_strategy_backup': None, diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py index 5e607bbc70b..bd8e80e0666 100644 --- a/tests/components/cloud/test_backup.py +++ b/tests/components/cloud/test_backup.py @@ -171,7 +171,7 @@ async def test_agents_list_backups( "size": 34519040, "agent_ids": ["cloud.cloud"], "failed_agent_ids": [], - "with_strategy_settings": False, + "with_strategy_settings": None, } ] @@ -218,7 +218,7 @@ async def test_agents_list_backups_fail_cloud( "size": 34519040, "agent_ids": ["cloud.cloud"], "failed_agent_ids": [], - "with_strategy_settings": False, + "with_strategy_settings": None, }, ), ( diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index 75cc049f7b5..45aa28c19d6 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -341,7 +341,7 @@ async def test_agent_info( "name": "Test", "protected": False, "size": 1048576, - "with_strategy_settings": False, + "with_strategy_settings": None, }, ), ( @@ -362,7 +362,7 @@ async def test_agent_info( "name": "Test", "protected": False, "size": 1048576, - "with_strategy_settings": False, + "with_strategy_settings": None, }, ), ], diff --git a/tests/components/kitchen_sink/test_backup.py b/tests/components/kitchen_sink/test_backup.py index 6a738094ae6..81876b5c3d1 100644 --- a/tests/components/kitchen_sink/test_backup.py +++ b/tests/components/kitchen_sink/test_backup.py @@ 
-104,7 +104,7 @@ async def test_agents_list_backups( "name": "Kitchen sink syncer", "protected": False, "size": 1234, - "with_strategy_settings": False, + "with_strategy_settings": None, } ] From fc4100833e3fc3db42574e60dd35920672bce052 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Wed, 18 Dec 2024 11:43:04 +0100 Subject: [PATCH 409/677] Change device class from Volume to Volume Storage in Overkiz (#133473) Change device class from Volume to Volume Storage --- homeassistant/components/overkiz/sensor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/overkiz/sensor.py b/homeassistant/components/overkiz/sensor.py index 5c54a1bd383..184b4938fef 100644 --- a/homeassistant/components/overkiz/sensor.py +++ b/homeassistant/components/overkiz/sensor.py @@ -423,7 +423,7 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [ OverkizSensorDescription( key=OverkizState.CORE_REMAINING_HOT_WATER, name="Warm water remaining", - device_class=SensorDeviceClass.VOLUME, + device_class=SensorDeviceClass.VOLUME_STORAGE, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfVolume.LITERS, ), From 3bb62565729310a258f52f9fc8977ee919088dd4 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Wed, 18 Dec 2024 11:48:10 +0100 Subject: [PATCH 410/677] Add test button for SmokeSensor in Overkiz (#133476) --- homeassistant/components/overkiz/button.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/overkiz/button.py b/homeassistant/components/overkiz/button.py index a39c236725a..c34be5cde84 100644 --- a/homeassistant/components/overkiz/button.py +++ b/homeassistant/components/overkiz/button.py @@ -4,7 +4,7 @@ from __future__ import annotations from dataclasses import dataclass -from pyoverkiz.enums import OverkizCommand +from pyoverkiz.enums import OverkizCommand, OverkizCommandParam from pyoverkiz.types import StateType as OverkizStateType from 
homeassistant.components.button import ( @@ -83,6 +83,14 @@ BUTTON_DESCRIPTIONS: list[OverkizButtonDescription] = [ name="Toggle", icon="mdi:sync", ), + # SmokeSensor + OverkizButtonDescription( + key=OverkizCommand.CHECK_EVENT_TRIGGER, + press_args=OverkizCommandParam.SHORT, + name="Test", + icon="mdi:smoke-detector", + entity_category=EntityCategory.DIAGNOSTIC, + ), ] SUPPORTED_COMMANDS = { From be25cb7aa7fd6048b3a60fe724eeabfc7507e9fb Mon Sep 17 00:00:00 2001 From: greyeee <62752780+greyeee@users.noreply.github.com> Date: Wed, 18 Dec 2024 20:19:45 +0800 Subject: [PATCH 411/677] Add support for SwitchBot Relay Switch 1 and Relay Switch 1PM (#132327) --- .../components/switchbot_cloud/__init__.py | 9 +- .../components/switchbot_cloud/sensor.py | 122 +++++++++++++----- .../components/switchbot_cloud/switch.py | 17 +++ .../components/switchbot_cloud/test_switch.py | 56 ++++++++ 4 files changed, 169 insertions(+), 35 deletions(-) create mode 100644 tests/components/switchbot_cloud/test_switch.py diff --git a/homeassistant/components/switchbot_cloud/__init__.py b/homeassistant/components/switchbot_cloud/__init__.py index 625b4698301..827dce550ef 100644 --- a/homeassistant/components/switchbot_cloud/__init__.py +++ b/homeassistant/components/switchbot_cloud/__init__.py @@ -75,9 +75,11 @@ def make_device_data( ) if ( isinstance(device, Device) - and device.device_type.startswith("Plug") - or isinstance(device, Remote) - ): + and ( + device.device_type.startswith("Plug") + or device.device_type in ["Relay Switch 1PM", "Relay Switch 1"] + ) + ) or isinstance(device, Remote): devices_data.switches.append( prepare_device(hass, api, device, coordinators_by_id) ) @@ -88,6 +90,7 @@ def make_device_data( "Hub 2", "MeterPro", "MeterPro(CO2)", + "Relay Switch 1PM", ]: devices_data.sensors.append( prepare_device(hass, api, device, coordinators_by_id) diff --git a/homeassistant/components/switchbot_cloud/sensor.py b/homeassistant/components/switchbot_cloud/sensor.py index 
90135ad96b3..ae912e914ba 100644 --- a/homeassistant/components/switchbot_cloud/sensor.py +++ b/homeassistant/components/switchbot_cloud/sensor.py @@ -12,6 +12,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, PERCENTAGE, + UnitOfElectricCurrent, + UnitOfElectricPotential, + UnitOfPower, UnitOfTemperature, ) from homeassistant.core import HomeAssistant, callback @@ -26,38 +29,97 @@ SENSOR_TYPE_TEMPERATURE = "temperature" SENSOR_TYPE_HUMIDITY = "humidity" SENSOR_TYPE_BATTERY = "battery" SENSOR_TYPE_CO2 = "CO2" +SENSOR_TYPE_POWER = "power" +SENSOR_TYPE_VOLTAGE = "voltage" +SENSOR_TYPE_CURRENT = "electricCurrent" -METER_PLUS_SENSOR_DESCRIPTIONS = ( - SensorEntityDescription( - key=SENSOR_TYPE_TEMPERATURE, - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - ), - SensorEntityDescription( - key=SENSOR_TYPE_HUMIDITY, - device_class=SensorDeviceClass.HUMIDITY, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=PERCENTAGE, - ), - SensorEntityDescription( - key=SENSOR_TYPE_BATTERY, - device_class=SensorDeviceClass.BATTERY, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=PERCENTAGE, - ), +TEMPERATURE_DESCRIPTION = SensorEntityDescription( + key=SENSOR_TYPE_TEMPERATURE, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, ) -METER_PRO_CO2_SENSOR_DESCRIPTIONS = ( - *METER_PLUS_SENSOR_DESCRIPTIONS, - SensorEntityDescription( - key=SENSOR_TYPE_CO2, - native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION, - state_class=SensorStateClass.MEASUREMENT, - device_class=SensorDeviceClass.CO2, - ), +HUMIDITY_DESCRIPTION = SensorEntityDescription( + key=SENSOR_TYPE_HUMIDITY, + device_class=SensorDeviceClass.HUMIDITY, + state_class=SensorStateClass.MEASUREMENT, + 
native_unit_of_measurement=PERCENTAGE, ) +BATTERY_DESCRIPTION = SensorEntityDescription( + key=SENSOR_TYPE_BATTERY, + device_class=SensorDeviceClass.BATTERY, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, +) + +POWER_DESCRIPTION = SensorEntityDescription( + key=SENSOR_TYPE_POWER, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, +) + +VOLATGE_DESCRIPTION = SensorEntityDescription( + key=SENSOR_TYPE_VOLTAGE, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, +) + +CURRENT_DESCRIPTION = SensorEntityDescription( + key=SENSOR_TYPE_CURRENT, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.MILLIAMPERE, +) + +CO2_DESCRIPTION = SensorEntityDescription( + key=SENSOR_TYPE_CO2, + device_class=SensorDeviceClass.CO2, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION, +) + +SENSOR_DESCRIPTIONS_BY_DEVICE_TYPES = { + "Meter": ( + TEMPERATURE_DESCRIPTION, + HUMIDITY_DESCRIPTION, + BATTERY_DESCRIPTION, + ), + "MeterPlus": ( + TEMPERATURE_DESCRIPTION, + HUMIDITY_DESCRIPTION, + BATTERY_DESCRIPTION, + ), + "WoIOSensor": ( + TEMPERATURE_DESCRIPTION, + HUMIDITY_DESCRIPTION, + BATTERY_DESCRIPTION, + ), + "Relay Switch 1PM": ( + POWER_DESCRIPTION, + VOLATGE_DESCRIPTION, + CURRENT_DESCRIPTION, + ), + "Hub 2": ( + TEMPERATURE_DESCRIPTION, + HUMIDITY_DESCRIPTION, + ), + "MeterPro": ( + TEMPERATURE_DESCRIPTION, + HUMIDITY_DESCRIPTION, + BATTERY_DESCRIPTION, + ), + "MeterPro(CO2)": ( + TEMPERATURE_DESCRIPTION, + HUMIDITY_DESCRIPTION, + BATTERY_DESCRIPTION, + CO2_DESCRIPTION, + ), +} + async def async_setup_entry( hass: HomeAssistant, @@ -70,11 +132,7 @@ async def async_setup_entry( async_add_entities( SwitchBotCloudSensor(data.api, device, 
coordinator, description) for device, coordinator in data.devices.sensors - for description in ( - METER_PRO_CO2_SENSOR_DESCRIPTIONS - if device.device_type == "MeterPro(CO2)" - else METER_PLUS_SENSOR_DESCRIPTIONS - ) + for description in SENSOR_DESCRIPTIONS_BY_DEVICE_TYPES[device.device_type] ) diff --git a/homeassistant/components/switchbot_cloud/switch.py b/homeassistant/components/switchbot_cloud/switch.py index c30e60086fa..281ebb9322e 100644 --- a/homeassistant/components/switchbot_cloud/switch.py +++ b/homeassistant/components/switchbot_cloud/switch.py @@ -69,6 +69,18 @@ class SwitchBotCloudPlugSwitch(SwitchBotCloudSwitch): _attr_device_class = SwitchDeviceClass.OUTLET +class SwitchBotCloudRelaySwitchSwitch(SwitchBotCloudSwitch): + """Representation of a SwitchBot relay switch.""" + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + if not self.coordinator.data: + return + self._attr_is_on = self.coordinator.data.get("switchStatus") == 1 + self.async_write_ha_state() + + @callback def _async_make_entity( api: SwitchBotAPI, device: Device | Remote, coordinator: SwitchBotCoordinator @@ -78,4 +90,9 @@ def _async_make_entity( return SwitchBotCloudRemoteSwitch(api, device, coordinator) if "Plug" in device.device_type: return SwitchBotCloudPlugSwitch(api, device, coordinator) + if device.device_type in [ + "Relay Switch 1PM", + "Relay Switch 1", + ]: + return SwitchBotCloudRelaySwitchSwitch(api, device, coordinator) raise NotImplementedError(f"Unsupported device type: {device.device_type}") diff --git a/tests/components/switchbot_cloud/test_switch.py b/tests/components/switchbot_cloud/test_switch.py new file mode 100644 index 00000000000..d4ef2c84549 --- /dev/null +++ b/tests/components/switchbot_cloud/test_switch.py @@ -0,0 +1,56 @@ +"""Test for the switchbot_cloud relay switch.""" + +from unittest.mock import patch + +from switchbot_api import Device + +from homeassistant.components.switch import 
DOMAIN as SWITCH_DOMAIN +from homeassistant.components.switchbot_cloud import SwitchBotAPI +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, +) +from homeassistant.core import HomeAssistant + +from . import configure_integration + + +async def test_relay_switch( + hass: HomeAssistant, mock_list_devices, mock_get_status +) -> None: + """Test turn on and turn off.""" + mock_list_devices.return_value = [ + Device( + deviceId="relay-switch-id-1", + deviceName="relay-switch-1", + deviceType="Relay Switch 1", + hubDeviceId="test-hub-id", + ), + ] + + mock_get_status.return_value = {"switchStatus": 0} + + entry = configure_integration(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.LOADED + + entity_id = "switch.relay_switch_1" + assert hass.states.get(entity_id).state == STATE_OFF + + with patch.object(SwitchBotAPI, "send_command"): + await hass.services.async_call( + SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + assert hass.states.get(entity_id).state == STATE_ON + + with patch.object(SwitchBotAPI, "send_command"): + await hass.services.async_call( + SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + assert hass.states.get(entity_id).state == STATE_OFF From 2aba1d399b100cf310c638ef776e0390a806f913 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Wed, 18 Dec 2024 13:47:30 +0100 Subject: [PATCH 412/677] Rename test file to singular form (#133482) --- tests/components/atag/{test_sensors.py => test_sensor.py} | 0 tests/components/broadlink/{test_sensors.py => test_sensor.py} | 0 tests/components/cert_expiry/{test_sensors.py => test_sensor.py} | 0 tests/components/idasen_desk/{test_buttons.py => test_button.py} | 0 tests/components/idasen_desk/{test_sensors.py => test_sensor.py} | 0 
.../{test_binary_sensors.ambr => test_binary_sensor.ambr} | 0 .../madvr/snapshots/{test_sensors.ambr => test_sensor.ambr} | 0 .../madvr/{test_binary_sensors.py => test_binary_sensor.py} | 0 tests/components/madvr/{test_sensors.py => test_sensor.py} | 0 .../{test_binary_sensors.ambr => test_binary_sensor.ambr} | 0 .../tesla_fleet/{test_binary_sensors.py => test_binary_sensor.py} | 0 .../{test_binary_sensors.ambr => test_binary_sensor.ambr} | 0 .../teslemetry/{test_binary_sensors.py => test_binary_sensor.py} | 0 .../{test_binary_sensors.ambr => test_binary_sensor.ambr} | 0 .../tessie/{test_binary_sensors.py => test_binary_sensor.py} | 0 15 files changed, 0 insertions(+), 0 deletions(-) rename tests/components/atag/{test_sensors.py => test_sensor.py} (100%) rename tests/components/broadlink/{test_sensors.py => test_sensor.py} (100%) rename tests/components/cert_expiry/{test_sensors.py => test_sensor.py} (100%) rename tests/components/idasen_desk/{test_buttons.py => test_button.py} (100%) rename tests/components/idasen_desk/{test_sensors.py => test_sensor.py} (100%) rename tests/components/madvr/snapshots/{test_binary_sensors.ambr => test_binary_sensor.ambr} (100%) rename tests/components/madvr/snapshots/{test_sensors.ambr => test_sensor.ambr} (100%) rename tests/components/madvr/{test_binary_sensors.py => test_binary_sensor.py} (100%) rename tests/components/madvr/{test_sensors.py => test_sensor.py} (100%) rename tests/components/tesla_fleet/snapshots/{test_binary_sensors.ambr => test_binary_sensor.ambr} (100%) rename tests/components/tesla_fleet/{test_binary_sensors.py => test_binary_sensor.py} (100%) rename tests/components/teslemetry/snapshots/{test_binary_sensors.ambr => test_binary_sensor.ambr} (100%) rename tests/components/teslemetry/{test_binary_sensors.py => test_binary_sensor.py} (100%) rename tests/components/tessie/snapshots/{test_binary_sensors.ambr => test_binary_sensor.ambr} (100%) rename tests/components/tessie/{test_binary_sensors.py => 
test_binary_sensor.py} (100%) diff --git a/tests/components/atag/test_sensors.py b/tests/components/atag/test_sensor.py similarity index 100% rename from tests/components/atag/test_sensors.py rename to tests/components/atag/test_sensor.py diff --git a/tests/components/broadlink/test_sensors.py b/tests/components/broadlink/test_sensor.py similarity index 100% rename from tests/components/broadlink/test_sensors.py rename to tests/components/broadlink/test_sensor.py diff --git a/tests/components/cert_expiry/test_sensors.py b/tests/components/cert_expiry/test_sensor.py similarity index 100% rename from tests/components/cert_expiry/test_sensors.py rename to tests/components/cert_expiry/test_sensor.py diff --git a/tests/components/idasen_desk/test_buttons.py b/tests/components/idasen_desk/test_button.py similarity index 100% rename from tests/components/idasen_desk/test_buttons.py rename to tests/components/idasen_desk/test_button.py diff --git a/tests/components/idasen_desk/test_sensors.py b/tests/components/idasen_desk/test_sensor.py similarity index 100% rename from tests/components/idasen_desk/test_sensors.py rename to tests/components/idasen_desk/test_sensor.py diff --git a/tests/components/madvr/snapshots/test_binary_sensors.ambr b/tests/components/madvr/snapshots/test_binary_sensor.ambr similarity index 100% rename from tests/components/madvr/snapshots/test_binary_sensors.ambr rename to tests/components/madvr/snapshots/test_binary_sensor.ambr diff --git a/tests/components/madvr/snapshots/test_sensors.ambr b/tests/components/madvr/snapshots/test_sensor.ambr similarity index 100% rename from tests/components/madvr/snapshots/test_sensors.ambr rename to tests/components/madvr/snapshots/test_sensor.ambr diff --git a/tests/components/madvr/test_binary_sensors.py b/tests/components/madvr/test_binary_sensor.py similarity index 100% rename from tests/components/madvr/test_binary_sensors.py rename to tests/components/madvr/test_binary_sensor.py diff --git 
a/tests/components/madvr/test_sensors.py b/tests/components/madvr/test_sensor.py similarity index 100% rename from tests/components/madvr/test_sensors.py rename to tests/components/madvr/test_sensor.py diff --git a/tests/components/tesla_fleet/snapshots/test_binary_sensors.ambr b/tests/components/tesla_fleet/snapshots/test_binary_sensor.ambr similarity index 100% rename from tests/components/tesla_fleet/snapshots/test_binary_sensors.ambr rename to tests/components/tesla_fleet/snapshots/test_binary_sensor.ambr diff --git a/tests/components/tesla_fleet/test_binary_sensors.py b/tests/components/tesla_fleet/test_binary_sensor.py similarity index 100% rename from tests/components/tesla_fleet/test_binary_sensors.py rename to tests/components/tesla_fleet/test_binary_sensor.py diff --git a/tests/components/teslemetry/snapshots/test_binary_sensors.ambr b/tests/components/teslemetry/snapshots/test_binary_sensor.ambr similarity index 100% rename from tests/components/teslemetry/snapshots/test_binary_sensors.ambr rename to tests/components/teslemetry/snapshots/test_binary_sensor.ambr diff --git a/tests/components/teslemetry/test_binary_sensors.py b/tests/components/teslemetry/test_binary_sensor.py similarity index 100% rename from tests/components/teslemetry/test_binary_sensors.py rename to tests/components/teslemetry/test_binary_sensor.py diff --git a/tests/components/tessie/snapshots/test_binary_sensors.ambr b/tests/components/tessie/snapshots/test_binary_sensor.ambr similarity index 100% rename from tests/components/tessie/snapshots/test_binary_sensors.ambr rename to tests/components/tessie/snapshots/test_binary_sensor.ambr diff --git a/tests/components/tessie/test_binary_sensors.py b/tests/components/tessie/test_binary_sensor.py similarity index 100% rename from tests/components/tessie/test_binary_sensors.py rename to tests/components/tessie/test_binary_sensor.py From ecb3bf79f32a2e25d141ff467e5958826ed9fc3a Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 18 Dec 
2024 13:51:05 +0100 Subject: [PATCH 413/677] Revert "Add support for subentries to config entries" (#133470) Revert "Add support for subentries to config entries (#117355)" This reverts commit ad15786115673c5b3fe40ea2f5d61b4b896f433e. --- .../components/config/config_entries.py | 126 ---- homeassistant/config_entries.py | 315 +-------- homeassistant/helpers/data_entry_flow.py | 4 +- script/hassfest/translations.py | 9 - tests/common.py | 2 - .../aemet/snapshots/test_diagnostics.ambr | 2 - .../airly/snapshots/test_diagnostics.ambr | 2 - .../airnow/snapshots/test_diagnostics.ambr | 2 - .../airvisual/snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../airzone/snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../axis/snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../blink/snapshots/test_diagnostics.ambr | 2 - .../braviatv/snapshots/test_diagnostics.ambr | 2 - .../co2signal/snapshots/test_diagnostics.ambr | 2 - .../coinbase/snapshots/test_diagnostics.ambr | 2 - .../comelit/snapshots/test_diagnostics.ambr | 4 - .../components/config/test_config_entries.py | 469 ------------- .../deconz/snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../ecovacs/snapshots/test_diagnostics.ambr | 4 - .../snapshots/test_config_flow.ambr | 4 - .../snapshots/test_diagnostics.ambr | 6 - .../esphome/snapshots/test_diagnostics.ambr | 2 - tests/components/esphome/test_diagnostics.py | 1 - .../forecast_solar/snapshots/test_init.ambr | 2 - .../fritz/snapshots/test_diagnostics.ambr | 2 - .../fronius/snapshots/test_diagnostics.ambr | 2 - .../fyta/snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_config_flow.ambr | 8 - .../gios/snapshots/test_diagnostics.ambr | 2 - .../goodwe/snapshots/test_diagnostics.ambr | 2 - 
.../snapshots/test_diagnostics.ambr | 2 - tests/components/guardian/test_diagnostics.py | 1 - .../snapshots/test_config_flow.ambr | 16 - .../snapshots/test_diagnostics.ambr | 2 - .../imgw_pib/snapshots/test_diagnostics.ambr | 2 - .../iqvia/snapshots/test_diagnostics.ambr | 2 - .../kostal_plenticore/test_diagnostics.py | 1 - .../snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../madvr/snapshots/test_diagnostics.ambr | 2 - .../melcloud/snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../netatmo/snapshots/test_diagnostics.ambr | 2 - .../nextdns/snapshots/test_diagnostics.ambr | 2 - .../nice_go/snapshots/test_diagnostics.ambr | 2 - tests/components/notion/test_diagnostics.py | 1 - .../onvif/snapshots/test_diagnostics.ambr | 2 - tests/components/openuv/test_diagnostics.py | 1 - .../p1_monitor/snapshots/test_init.ambr | 4 - .../snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../components/philips_js/test_config_flow.py | 1 - .../pi_hole/snapshots/test_diagnostics.ambr | 2 - .../proximity/snapshots/test_diagnostics.ambr | 2 - tests/components/ps4/test_init.py | 1 - .../components/purpleair/test_diagnostics.py | 1 - .../snapshots/test_diagnostics.ambr | 4 - .../snapshots/test_diagnostics.ambr | 4 - .../recollect_waste/test_diagnostics.py | 1 - .../ridwell/snapshots/test_diagnostics.ambr | 2 - .../components/samsungtv/test_diagnostics.py | 3 - .../snapshots/test_diagnostics.ambr | 2 - .../components/simplisafe/test_diagnostics.py | 1 - .../solarlog/snapshots/test_diagnostics.ambr | 2 - tests/components/subaru/test_config_flow.py | 2 - .../switcher_kis/test_diagnostics.py | 1 - .../snapshots/test_diagnostics.ambr | 4 - .../snapshots/test_diagnostics.ambr | 2 - .../tractive/snapshots/test_diagnostics.ambr | 2 - .../tuya/snapshots/test_config_flow.ambr | 8 - .../twinkly/snapshots/test_diagnostics.ambr | 2 - 
.../unifi/snapshots/test_diagnostics.ambr | 2 - .../uptime/snapshots/test_config_flow.ambr | 4 - .../snapshots/test_diagnostics.ambr | 2 - .../v2c/snapshots/test_diagnostics.ambr | 2 - .../vicare/snapshots/test_diagnostics.ambr | 2 - .../snapshots/test_diagnostics.ambr | 2 - .../watttime/snapshots/test_diagnostics.ambr | 2 - .../webmin/snapshots/test_diagnostics.ambr | 2 - tests/components/webostv/test_diagnostics.py | 1 - .../whirlpool/snapshots/test_diagnostics.ambr | 2 - .../whois/snapshots/test_config_flow.ambr | 20 - .../workday/snapshots/test_diagnostics.ambr | 2 - .../wyoming/snapshots/test_config_flow.ambr | 12 - .../zha/snapshots/test_diagnostics.ambr | 2 - tests/snapshots/test_config_entries.ambr | 2 - tests/test_config_entries.py | 643 +----------------- 95 files changed, 33 insertions(+), 1774 deletions(-) diff --git a/homeassistant/components/config/config_entries.py b/homeassistant/components/config/config_entries.py index 5794819995d..da50f7e93a1 100644 --- a/homeassistant/components/config/config_entries.py +++ b/homeassistant/components/config/config_entries.py @@ -46,13 +46,6 @@ def async_setup(hass: HomeAssistant) -> bool: hass.http.register_view(OptionManagerFlowIndexView(hass.config_entries.options)) hass.http.register_view(OptionManagerFlowResourceView(hass.config_entries.options)) - hass.http.register_view( - SubentryManagerFlowIndexView(hass.config_entries.subentries) - ) - hass.http.register_view( - SubentryManagerFlowResourceView(hass.config_entries.subentries) - ) - websocket_api.async_register_command(hass, config_entries_get) websocket_api.async_register_command(hass, config_entry_disable) websocket_api.async_register_command(hass, config_entry_get_single) @@ -61,9 +54,6 @@ def async_setup(hass: HomeAssistant) -> bool: websocket_api.async_register_command(hass, config_entries_progress) websocket_api.async_register_command(hass, ignore_config_flow) - websocket_api.async_register_command(hass, config_subentry_delete) - 
websocket_api.async_register_command(hass, config_subentry_list) - return True @@ -295,63 +285,6 @@ class OptionManagerFlowResourceView( return await super().post(request, flow_id) -class SubentryManagerFlowIndexView( - FlowManagerIndexView[config_entries.ConfigSubentryFlowManager] -): - """View to create subentry flows.""" - - url = "/api/config/config_entries/subentries/flow" - name = "api:config:config_entries:subentries:flow" - - @require_admin( - error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT) - ) - @RequestDataValidator( - vol.Schema( - { - vol.Required("handler"): vol.All(vol.Coerce(tuple), (str, str)), - vol.Optional("show_advanced_options", default=False): cv.boolean, - }, - extra=vol.ALLOW_EXTRA, - ) - ) - async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response: - """Handle a POST request. - - handler in request is [entry_id, subentry_type]. - """ - return await super()._post_impl(request, data) - - def get_context(self, data: dict[str, Any]) -> dict[str, Any]: - """Return context.""" - context = super().get_context(data) - context["source"] = config_entries.SOURCE_USER - return context - - -class SubentryManagerFlowResourceView( - FlowManagerResourceView[config_entries.ConfigSubentryFlowManager] -): - """View to interact with the subentry flow manager.""" - - url = "/api/config/config_entries/subentries/flow/{flow_id}" - name = "api:config:config_entries:subentries:flow:resource" - - @require_admin( - error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT) - ) - async def get(self, request: web.Request, /, flow_id: str) -> web.Response: - """Get the current state of a data_entry_flow.""" - return await super().get(request, flow_id) - - @require_admin( - error=Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT) - ) - async def post(self, request: web.Request, flow_id: str) -> web.Response: - """Handle a POST request.""" - return await super().post(request, 
flow_id) - - @websocket_api.require_admin @websocket_api.websocket_command({"type": "config_entries/flow/progress"}) def config_entries_progress( @@ -655,62 +588,3 @@ async def _async_matching_config_entries_json_fragments( ) or (filter_is_not_helper and entry.domain not in integrations) ] - - -@websocket_api.require_admin -@websocket_api.websocket_command( - { - "type": "config_entries/subentries/list", - "entry_id": str, - } -) -@websocket_api.async_response -async def config_subentry_list( - hass: HomeAssistant, - connection: websocket_api.ActiveConnection, - msg: dict[str, Any], -) -> None: - """List subentries of a config entry.""" - entry = get_entry(hass, connection, msg["entry_id"], msg["id"]) - if entry is None: - return - - result = [ - { - "subentry_id": subentry.subentry_id, - "title": subentry.title, - "unique_id": subentry.unique_id, - } - for subentry_id, subentry in entry.subentries.items() - ] - connection.send_result(msg["id"], result) - - -@websocket_api.require_admin -@websocket_api.websocket_command( - { - "type": "config_entries/subentries/delete", - "entry_id": str, - "subentry_id": str, - } -) -@websocket_api.async_response -async def config_subentry_delete( - hass: HomeAssistant, - connection: websocket_api.ActiveConnection, - msg: dict[str, Any], -) -> None: - """Delete a subentry of a config entry.""" - entry = get_entry(hass, connection, msg["entry_id"], msg["id"]) - if entry is None: - return - - try: - hass.config_entries.async_remove_subentry(entry, msg["subentry_id"]) - except config_entries.UnknownSubEntry: - connection.send_error( - msg["id"], websocket_api.const.ERR_NOT_FOUND, "Config subentry not found" - ) - return - - connection.send_result(msg["id"]) diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index d34828f5e46..ade4cd855ca 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -15,7 +15,6 @@ from collections.abc import ( ) from contextvars import ContextVar 
from copy import deepcopy -from dataclasses import dataclass, field from datetime import datetime from enum import Enum, StrEnum import functools @@ -23,7 +22,7 @@ from functools import cache import logging from random import randint from types import MappingProxyType -from typing import TYPE_CHECKING, Any, Generic, Self, TypedDict, cast +from typing import TYPE_CHECKING, Any, Generic, Self, cast from async_interrupt import interrupt from propcache import cached_property @@ -129,7 +128,7 @@ HANDLERS: Registry[str, type[ConfigFlow]] = Registry() STORAGE_KEY = "core.config_entries" STORAGE_VERSION = 1 -STORAGE_VERSION_MINOR = 5 +STORAGE_VERSION_MINOR = 4 SAVE_DELAY = 1 @@ -257,10 +256,6 @@ class UnknownEntry(ConfigError): """Unknown entry specified.""" -class UnknownSubEntry(ConfigError): - """Unknown subentry specified.""" - - class OperationNotAllowed(ConfigError): """Raised when a config entry operation is not allowed.""" @@ -305,7 +300,6 @@ class ConfigFlowResult(FlowResult[ConfigFlowContext, str], total=False): minor_version: int options: Mapping[str, Any] - subentries: Iterable[ConfigSubentryData] version: int @@ -319,51 +313,6 @@ def _validate_item(*, disabled_by: ConfigEntryDisabler | Any | None = None) -> N ) -class ConfigSubentryData(TypedDict): - """Container for configuration subentry data. - - Returned by integrations, a subentry_id will be assigned automatically. - """ - - data: Mapping[str, Any] - title: str - unique_id: str | None - - -class ConfigSubentryDataWithId(ConfigSubentryData): - """Container for configuration subentry data. - - This type is used when loading existing subentries from storage. 
- """ - - subentry_id: str - - -class SubentryFlowResult(FlowResult[FlowContext, tuple[str, str]], total=False): - """Typed result dict for subentry flow.""" - - unique_id: str | None - - -@dataclass(frozen=True, kw_only=True) -class ConfigSubentry: - """Container for a configuration subentry.""" - - data: MappingProxyType[str, Any] - subentry_id: str = field(default_factory=ulid_util.ulid_now) - title: str - unique_id: str | None - - def as_dict(self) -> ConfigSubentryDataWithId: - """Return dictionary version of this subentry.""" - return { - "data": dict(self.data), - "subentry_id": self.subentry_id, - "title": self.title, - "unique_id": self.unique_id, - } - - class ConfigEntry(Generic[_DataT]): """Hold a configuration entry.""" @@ -373,7 +322,6 @@ class ConfigEntry(Generic[_DataT]): data: MappingProxyType[str, Any] runtime_data: _DataT options: MappingProxyType[str, Any] - subentries: MappingProxyType[str, ConfigSubentry] unique_id: str | None state: ConfigEntryState reason: str | None @@ -389,7 +337,6 @@ class ConfigEntry(Generic[_DataT]): supports_remove_device: bool | None _supports_options: bool | None _supports_reconfigure: bool | None - _supported_subentries: tuple[str, ...] 
| None update_listeners: list[UpdateListenerType] _async_cancel_retry_setup: Callable[[], Any] | None _on_unload: list[Callable[[], Coroutine[Any, Any, None] | None]] | None @@ -419,7 +366,6 @@ class ConfigEntry(Generic[_DataT]): pref_disable_polling: bool | None = None, source: str, state: ConfigEntryState = ConfigEntryState.NOT_LOADED, - subentries_data: Iterable[ConfigSubentryData | ConfigSubentryDataWithId] | None, title: str, unique_id: str | None, version: int, @@ -445,24 +391,6 @@ class ConfigEntry(Generic[_DataT]): # Entry options _setter(self, "options", MappingProxyType(options or {})) - # Subentries - subentries_data = subentries_data or () - subentries = {} - for subentry_data in subentries_data: - subentry_kwargs = {} - if "subentry_id" in subentry_data: - # If subentry_data has key "subentry_id", we're loading from storage - subentry_kwargs["subentry_id"] = subentry_data["subentry_id"] # type: ignore[typeddict-item] - subentry = ConfigSubentry( - data=MappingProxyType(subentry_data["data"]), - title=subentry_data["title"], - unique_id=subentry_data.get("unique_id"), - **subentry_kwargs, - ) - subentries[subentry.subentry_id] = subentry - - _setter(self, "subentries", MappingProxyType(subentries)) - # Entry system options if pref_disable_new_entities is None: pref_disable_new_entities = False @@ -499,9 +427,6 @@ class ConfigEntry(Generic[_DataT]): # Supports reconfigure _setter(self, "_supports_reconfigure", None) - # Supports subentries - _setter(self, "_supported_subentries", None) - # Listeners to call on update _setter(self, "update_listeners", []) @@ -574,18 +499,6 @@ class ConfigEntry(Generic[_DataT]): ) return self._supports_reconfigure or False - @property - def supported_subentries(self) -> tuple[str, ...]: - """Return supported subentries.""" - if self._supported_subentries is None and ( - handler := HANDLERS.get(self.domain) - ): - # work out sub entries supported by the handler - object.__setattr__( - self, "_supported_subentries", 
handler.async_supported_subentries(self) - ) - return self._supported_subentries or () - def clear_state_cache(self) -> None: """Clear cached properties that are included in as_json_fragment.""" self.__dict__.pop("as_json_fragment", None) @@ -605,14 +518,12 @@ class ConfigEntry(Generic[_DataT]): "supports_remove_device": self.supports_remove_device or False, "supports_unload": self.supports_unload or False, "supports_reconfigure": self.supports_reconfigure, - "supported_subentries": self.supported_subentries, "pref_disable_new_entities": self.pref_disable_new_entities, "pref_disable_polling": self.pref_disable_polling, "disabled_by": self.disabled_by, "reason": self.reason, "error_reason_translation_key": self.error_reason_translation_key, "error_reason_translation_placeholders": self.error_reason_translation_placeholders, - "num_subentries": len(self.subentries), } return json_fragment(json_bytes(json_repr)) @@ -1107,7 +1018,6 @@ class ConfigEntry(Generic[_DataT]): "pref_disable_new_entities": self.pref_disable_new_entities, "pref_disable_polling": self.pref_disable_polling, "source": self.source, - "subentries": [subentry.as_dict() for subentry in self.subentries.values()], "title": self.title, "unique_id": self.unique_id, "version": self.version, @@ -1593,7 +1503,6 @@ class ConfigEntriesFlowManager( minor_version=result["minor_version"], options=result["options"], source=flow.context["source"], - subentries_data=result["subentries"], title=result["title"], unique_id=flow.unique_id, version=result["version"], @@ -1884,11 +1793,6 @@ class ConfigEntryStore(storage.Store[dict[str, list[dict[str, Any]]]]): for entry in data["entries"]: entry["discovery_keys"] = {} - if old_minor_version < 5: - # Version 1.4 adds config subentries - for entry in data["entries"]: - entry.setdefault("subentries", entry.get("subentries", {})) - if old_major_version > 1: raise NotImplementedError return data @@ -1905,7 +1809,6 @@ class ConfigEntries: self.hass = hass self.flow = 
ConfigEntriesFlowManager(hass, self, hass_config) self.options = OptionsFlowManager(hass) - self.subentries = ConfigSubentryFlowManager(hass) self._hass_config = hass_config self._entries = ConfigEntryItems(hass) self._store = ConfigEntryStore(hass) @@ -2108,7 +2011,6 @@ class ConfigEntries: pref_disable_new_entities=entry["pref_disable_new_entities"], pref_disable_polling=entry["pref_disable_polling"], source=entry["source"], - subentries_data=entry["subentries"], title=entry["title"], unique_id=entry["unique_id"], version=entry["version"], @@ -2268,44 +2170,6 @@ class ConfigEntries: If the entry was changed, the update_listeners are fired and this function returns True - If the entry was not changed, the update_listeners are - not fired and this function returns False - """ - return self._async_update_entry( - entry, - data=data, - discovery_keys=discovery_keys, - minor_version=minor_version, - options=options, - pref_disable_new_entities=pref_disable_new_entities, - pref_disable_polling=pref_disable_polling, - title=title, - unique_id=unique_id, - version=version, - ) - - @callback - def _async_update_entry( - self, - entry: ConfigEntry, - *, - data: Mapping[str, Any] | UndefinedType = UNDEFINED, - discovery_keys: MappingProxyType[str, tuple[DiscoveryKey, ...]] - | UndefinedType = UNDEFINED, - minor_version: int | UndefinedType = UNDEFINED, - options: Mapping[str, Any] | UndefinedType = UNDEFINED, - pref_disable_new_entities: bool | UndefinedType = UNDEFINED, - pref_disable_polling: bool | UndefinedType = UNDEFINED, - subentries: dict[str, ConfigSubentry] | UndefinedType = UNDEFINED, - title: str | UndefinedType = UNDEFINED, - unique_id: str | None | UndefinedType = UNDEFINED, - version: int | UndefinedType = UNDEFINED, - ) -> bool: - """Update a config entry. 
- - If the entry was changed, the update_listeners are - fired and this function returns True - If the entry was not changed, the update_listeners are not fired and this function returns False """ @@ -2368,11 +2232,6 @@ class ConfigEntries: changed = True _setter(entry, "options", MappingProxyType(options)) - if subentries is not UNDEFINED: - if entry.subentries != subentries: - changed = True - _setter(entry, "subentries", MappingProxyType(subentries)) - if not changed: return False @@ -2390,37 +2249,6 @@ class ConfigEntries: self._async_dispatch(ConfigEntryChange.UPDATED, entry) return True - @callback - def async_add_subentry(self, entry: ConfigEntry, subentry: ConfigSubentry) -> bool: - """Add a subentry to a config entry.""" - self._raise_if_subentry_unique_id_exists(entry, subentry.unique_id) - - return self._async_update_entry( - entry, - subentries=entry.subentries | {subentry.subentry_id: subentry}, - ) - - @callback - def async_remove_subentry(self, entry: ConfigEntry, subentry_id: str) -> bool: - """Remove a subentry from a config entry.""" - subentries = dict(entry.subentries) - try: - subentries.pop(subentry_id) - except KeyError as err: - raise UnknownSubEntry from err - - return self._async_update_entry(entry, subentries=subentries) - - def _raise_if_subentry_unique_id_exists( - self, entry: ConfigEntry, unique_id: str | None - ) -> None: - """Raise if a subentry with the same unique_id exists.""" - if unique_id is None: - return - for existing_subentry in entry.subentries.values(): - if existing_subentry.unique_id == unique_id: - raise data_entry_flow.AbortFlow("already_configured") - @callback def _async_dispatch( self, change_type: ConfigEntryChange, entry: ConfigEntry @@ -2757,20 +2585,6 @@ class ConfigFlow(ConfigEntryBaseFlow): """Return options flow support for this handler.""" return cls.async_get_options_flow is not ConfigFlow.async_get_options_flow - @staticmethod - @callback - def async_get_subentry_flow( - config_entry: ConfigEntry, 
subentry_type: str - ) -> ConfigSubentryFlow: - """Get the subentry flow for this handler.""" - raise NotImplementedError - - @classmethod - @callback - def async_supported_subentries(cls, config_entry: ConfigEntry) -> tuple[str, ...]: - """Return subentries supported by this handler.""" - return () - @callback def _async_abort_entries_match( self, match_dict: dict[str, Any] | None = None @@ -3079,7 +2893,6 @@ class ConfigFlow(ConfigEntryBaseFlow): description: str | None = None, description_placeholders: Mapping[str, str] | None = None, options: Mapping[str, Any] | None = None, - subentries: Iterable[ConfigSubentryData] | None = None, ) -> ConfigFlowResult: """Finish config flow and create a config entry.""" if self.source in {SOURCE_REAUTH, SOURCE_RECONFIGURE}: @@ -3099,7 +2912,6 @@ class ConfigFlow(ConfigEntryBaseFlow): result["minor_version"] = self.MINOR_VERSION result["options"] = options or {} - result["subentries"] = subentries or () result["version"] = self.VERSION return result @@ -3214,126 +3026,17 @@ class ConfigFlow(ConfigEntryBaseFlow): ) -class _ConfigSubFlowManager: - """Mixin class for flow managers which manage flows tied to a config entry.""" +class OptionsFlowManager( + data_entry_flow.FlowManager[ConfigFlowContext, ConfigFlowResult] +): + """Flow to set options for a configuration entry.""" - hass: HomeAssistant + _flow_result = ConfigFlowResult def _async_get_config_entry(self, config_entry_id: str) -> ConfigEntry: """Return config entry or raise if not found.""" return self.hass.config_entries.async_get_known_entry(config_entry_id) - -class ConfigSubentryFlowManager( - data_entry_flow.FlowManager[FlowContext, SubentryFlowResult, tuple[str, str]], - _ConfigSubFlowManager, -): - """Manage all the config subentry flows that are in progress.""" - - _flow_result = SubentryFlowResult - - async def async_create_flow( - self, - handler_key: tuple[str, str], - *, - context: FlowContext | None = None, - data: dict[str, Any] | None = None, - ) -> 
ConfigSubentryFlow: - """Create a subentry flow for a config entry. - - The entry_id and flow.handler[0] is the same thing to map entry with flow. - """ - if not context or "source" not in context: - raise KeyError("Context not set or doesn't have a source set") - - entry_id, subentry_type = handler_key - entry = self._async_get_config_entry(entry_id) - handler = await _async_get_flow_handler(self.hass, entry.domain, {}) - if subentry_type not in handler.async_supported_subentries(entry): - raise data_entry_flow.UnknownHandler( - f"Config entry '{entry.domain}' does not support subentry '{subentry_type}'" - ) - subentry_flow = handler.async_get_subentry_flow(entry, subentry_type) - subentry_flow.init_step = context["source"] - return subentry_flow - - async def async_finish_flow( - self, - flow: data_entry_flow.FlowHandler[ - FlowContext, SubentryFlowResult, tuple[str, str] - ], - result: SubentryFlowResult, - ) -> SubentryFlowResult: - """Finish a subentry flow and add a new subentry to the configuration entry. - - The flow.handler[0] and entry_id is the same thing to map flow with entry. 
- """ - flow = cast(ConfigSubentryFlow, flow) - - if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY: - return result - - entry_id = flow.handler[0] - entry = self.hass.config_entries.async_get_entry(entry_id) - if entry is None: - raise UnknownEntry(entry_id) - - unique_id = result.get("unique_id") - if unique_id is not None and not isinstance(unique_id, str): - raise HomeAssistantError("unique_id must be a string") - - self.hass.config_entries.async_add_subentry( - entry, - ConfigSubentry( - data=MappingProxyType(result["data"]), - title=result["title"], - unique_id=unique_id, - ), - ) - - result["result"] = True - return result - - -class ConfigSubentryFlow( - data_entry_flow.FlowHandler[FlowContext, SubentryFlowResult, tuple[str, str]] -): - """Base class for config subentry flows.""" - - _flow_result = SubentryFlowResult - handler: tuple[str, str] - - @callback - def async_create_entry( - self, - *, - title: str | None = None, - data: Mapping[str, Any], - description: str | None = None, - description_placeholders: Mapping[str, str] | None = None, - unique_id: str | None = None, - ) -> SubentryFlowResult: - """Finish config flow and create a config entry.""" - result = super().async_create_entry( - title=title, - data=data, - description=description, - description_placeholders=description_placeholders, - ) - - result["unique_id"] = unique_id - - return result - - -class OptionsFlowManager( - data_entry_flow.FlowManager[ConfigFlowContext, ConfigFlowResult], - _ConfigSubFlowManager, -): - """Manage all the config entry option flows that are in progress.""" - - _flow_result = ConfigFlowResult - async def async_create_flow( self, handler_key: str, @@ -3343,7 +3046,7 @@ class OptionsFlowManager( ) -> OptionsFlow: """Create an options flow for a config entry. - The entry_id and the flow.handler is the same thing to map entry with flow. + Entry_id and flow.handler is the same thing to map entry with flow. 
""" entry = self._async_get_config_entry(handler_key) handler = await _async_get_flow_handler(self.hass, entry.domain, {}) @@ -3359,7 +3062,7 @@ class OptionsFlowManager( This method is called when a flow step returns FlowResultType.ABORT or FlowResultType.CREATE_ENTRY. - The flow.handler and the entry_id is the same thing to map flow with entry. + Flow.handler and entry_id is the same thing to map flow with entry. """ flow = cast(OptionsFlow, flow) diff --git a/homeassistant/helpers/data_entry_flow.py b/homeassistant/helpers/data_entry_flow.py index e98061d50b7..adb2062a8ea 100644 --- a/homeassistant/helpers/data_entry_flow.py +++ b/homeassistant/helpers/data_entry_flow.py @@ -18,7 +18,7 @@ from . import config_validation as cv _FlowManagerT = TypeVar( "_FlowManagerT", - bound=data_entry_flow.FlowManager[Any, Any, Any], + bound=data_entry_flow.FlowManager[Any, Any], default=data_entry_flow.FlowManager, ) @@ -71,7 +71,7 @@ class FlowManagerIndexView(_BaseFlowManagerView[_FlowManagerT]): async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response: """Initialize a POST request. 
- Override `post` and call `_post_impl` in subclasses which need + Override `_post_impl` in subclasses which need to implement their own `RequestDataValidator` """ return await self._post_impl(request, data) diff --git a/script/hassfest/translations.py b/script/hassfest/translations.py index 078c649666d..2fb70b6e0be 100644 --- a/script/hassfest/translations.py +++ b/script/hassfest/translations.py @@ -285,15 +285,6 @@ def gen_strings_schema(config: Config, integration: Integration) -> vol.Schema: "user" if integration.integration_type == "helper" else None ), ), - vol.Optional("config_subentries"): cv.schema_with_slug_keys( - gen_data_entry_schema( - config=config, - integration=integration, - flow_title=REQUIRED, - require_step_title=False, - ), - slug_validator=vol.Any("_", cv.slug), - ), vol.Optional("options"): gen_data_entry_schema( config=config, integration=integration, diff --git a/tests/common.py b/tests/common.py index d2b0dff8faa..ac6f10b8c44 100644 --- a/tests/common.py +++ b/tests/common.py @@ -1000,7 +1000,6 @@ class MockConfigEntry(config_entries.ConfigEntry): reason=None, source=config_entries.SOURCE_USER, state=None, - subentries_data=None, title="Mock Title", unique_id=None, version=1, @@ -1017,7 +1016,6 @@ class MockConfigEntry(config_entries.ConfigEntry): "options": options or {}, "pref_disable_new_entities": pref_disable_new_entities, "pref_disable_polling": pref_disable_polling, - "subentries_data": subentries_data or (), "title": title, "unique_id": unique_id, "version": version, diff --git a/tests/components/aemet/snapshots/test_diagnostics.ambr b/tests/components/aemet/snapshots/test_diagnostics.ambr index 1e09a372352..54546507dfa 100644 --- a/tests/components/aemet/snapshots/test_diagnostics.ambr +++ b/tests/components/aemet/snapshots/test_diagnostics.ambr @@ -21,8 +21,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 
'version': 1, diff --git a/tests/components/airly/snapshots/test_diagnostics.ambr b/tests/components/airly/snapshots/test_diagnostics.ambr index 1c760eaec52..ec501b2fd7e 100644 --- a/tests/components/airly/snapshots/test_diagnostics.ambr +++ b/tests/components/airly/snapshots/test_diagnostics.ambr @@ -19,8 +19,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Home', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/airnow/snapshots/test_diagnostics.ambr b/tests/components/airnow/snapshots/test_diagnostics.ambr index 73ba6a7123f..3dd4788dc61 100644 --- a/tests/components/airnow/snapshots/test_diagnostics.ambr +++ b/tests/components/airnow/snapshots/test_diagnostics.ambr @@ -35,8 +35,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/airvisual/snapshots/test_diagnostics.ambr b/tests/components/airvisual/snapshots/test_diagnostics.ambr index 0dbdef1d508..606d6082351 100644 --- a/tests/components/airvisual/snapshots/test_diagnostics.ambr +++ b/tests/components/airvisual/snapshots/test_diagnostics.ambr @@ -47,8 +47,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 3, diff --git a/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr b/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr index 113db6e3b96..cb1d3a7aee7 100644 --- a/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr +++ b/tests/components/airvisual_pro/snapshots/test_diagnostics.ambr @@ -101,8 +101,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': 'XXXXXXX', 'version': 1, diff 
--git a/tests/components/airzone/snapshots/test_diagnostics.ambr b/tests/components/airzone/snapshots/test_diagnostics.ambr index 39668e3d19f..fb4f6530b1e 100644 --- a/tests/components/airzone/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone/snapshots/test_diagnostics.ambr @@ -287,8 +287,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr index 4bd7bfaccdd..c6ad36916bf 100644 --- a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr @@ -101,8 +101,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': 'installation1', 'version': 1, diff --git a/tests/components/ambient_station/snapshots/test_diagnostics.ambr b/tests/components/ambient_station/snapshots/test_diagnostics.ambr index 07db19101ab..2f90b09d39f 100644 --- a/tests/components/ambient_station/snapshots/test_diagnostics.ambr +++ b/tests/components/ambient_station/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/axis/snapshots/test_diagnostics.ambr b/tests/components/axis/snapshots/test_diagnostics.ambr index b475c796d2b..ebd0061f416 100644 --- a/tests/components/axis/snapshots/test_diagnostics.ambr +++ b/tests/components/axis/snapshots/test_diagnostics.ambr @@ -47,8 +47,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 
'version': 3, diff --git a/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr b/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr index d7f9a045921..e9540b5cec6 100644 --- a/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr +++ b/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr @@ -18,8 +18,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Beosound Balance-11111111', 'unique_id': '11111111', 'version': 1, diff --git a/tests/components/blink/snapshots/test_diagnostics.ambr b/tests/components/blink/snapshots/test_diagnostics.ambr index 54df2b48cdb..edc2879a66b 100644 --- a/tests/components/blink/snapshots/test_diagnostics.ambr +++ b/tests/components/blink/snapshots/test_diagnostics.ambr @@ -48,8 +48,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 3, diff --git a/tests/components/braviatv/snapshots/test_diagnostics.ambr b/tests/components/braviatv/snapshots/test_diagnostics.ambr index de76c00cd23..cd29c647df7 100644 --- a/tests/components/braviatv/snapshots/test_diagnostics.ambr +++ b/tests/components/braviatv/snapshots/test_diagnostics.ambr @@ -19,8 +19,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': 'very_unique_string', 'version': 1, diff --git a/tests/components/co2signal/snapshots/test_diagnostics.ambr b/tests/components/co2signal/snapshots/test_diagnostics.ambr index 4159c8ec1a1..9218e7343ec 100644 --- a/tests/components/co2signal/snapshots/test_diagnostics.ambr +++ b/tests/components/co2signal/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, 
diff --git a/tests/components/coinbase/snapshots/test_diagnostics.ambr b/tests/components/coinbase/snapshots/test_diagnostics.ambr index 3eab18fb9f3..51bd946f140 100644 --- a/tests/components/coinbase/snapshots/test_diagnostics.ambr +++ b/tests/components/coinbase/snapshots/test_diagnostics.ambr @@ -44,8 +44,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, diff --git a/tests/components/comelit/snapshots/test_diagnostics.ambr b/tests/components/comelit/snapshots/test_diagnostics.ambr index 877f48a4611..58ce74035f9 100644 --- a/tests/components/comelit/snapshots/test_diagnostics.ambr +++ b/tests/components/comelit/snapshots/test_diagnostics.ambr @@ -71,8 +71,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, @@ -137,8 +135,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/config/test_config_entries.py b/tests/components/config/test_config_entries.py index 0a1ffbe87b3..ee000c5ada2 100644 --- a/tests/components/config/test_config_entries.py +++ b/tests/components/config/test_config_entries.py @@ -137,13 +137,11 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": core_ce.ConfigEntryState.NOT_LOADED.value, - "supported_subentries": [], "supports_options": True, "supports_reconfigure": False, "supports_remove_device": False, @@ -157,13 +155,11 @@ async def test_get_entries(hass: HomeAssistant, client: 
TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": core_ce.ConfigEntryState.SETUP_ERROR.value, - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -177,13 +173,11 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": core_ce.ConfigEntryState.NOT_LOADED.value, - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -197,13 +191,11 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": core_ce.ConfigEntryState.NOT_LOADED.value, - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -217,13 +209,11 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla5", "state": core_ce.ConfigEntryState.NOT_LOADED.value, - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -581,13 +571,11 @@ async def 
test_create_account(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": core_ce.SOURCE_USER, "state": core_ce.ConfigEntryState.LOADED.value, - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -598,7 +586,6 @@ async def test_create_account(hass: HomeAssistant, client: TestClient) -> None: "description_placeholders": None, "options": {}, "minor_version": 1, - "subentries": [], } @@ -667,13 +654,11 @@ async def test_two_step_flow(hass: HomeAssistant, client: TestClient) -> None: "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": core_ce.SOURCE_USER, "state": core_ce.ConfigEntryState.LOADED.value, - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -684,7 +669,6 @@ async def test_two_step_flow(hass: HomeAssistant, client: TestClient) -> None: "description_placeholders": None, "options": {}, "minor_version": 1, - "subentries": [], } @@ -1104,273 +1088,6 @@ async def test_options_flow_with_invalid_data( assert data == {"errors": {"choices": "invalid is not a valid option"}} -async def test_subentry_flow(hass: HomeAssistant, client) -> None: - """Test we can start a subentry flow.""" - - class TestFlow(core_ce.ConfigFlow): - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - class SubentryFlowHandler(core_ce.ConfigSubentryFlow): - async def async_step_init(self, user_input=None): - raise NotImplementedError - - async def async_step_user(self, user_input=None): - schema = OrderedDict() - 
schema[vol.Required("enabled")] = bool - return self.async_show_form( - step_id="user", - data_schema=schema, - description_placeholders={"enabled": "Set to true to be true"}, - ) - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries(cls, config_entry): - return ("test",) - - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - MockConfigEntry( - domain="test", - entry_id="test1", - source="bla", - ).add_to_hass(hass) - entry = hass.config_entries.async_entries()[0] - - with patch.dict(HANDLERS, {"test": TestFlow}): - url = "/api/config/config_entries/subentries/flow" - resp = await client.post(url, json={"handler": [entry.entry_id, "test"]}) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - data.pop("flow_id") - assert data == { - "type": "form", - "handler": ["test1", "test"], - "step_id": "user", - "data_schema": [{"name": "enabled", "required": True, "type": "boolean"}], - "description_placeholders": {"enabled": "Set to true to be true"}, - "errors": None, - "last_step": None, - "preview": None, - } - - -@pytest.mark.parametrize( - ("endpoint", "method"), - [ - ("/api/config/config_entries/subentries/flow", "post"), - ("/api/config/config_entries/subentries/flow/1", "get"), - ("/api/config/config_entries/subentries/flow/1", "post"), - ], -) -async def test_subentry_flow_unauth( - hass: HomeAssistant, client, hass_admin_user: MockUser, endpoint: str, method: str -) -> None: - """Test unauthorized on subentry flow.""" - - class TestFlow(core_ce.ConfigFlow): - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - class SubentryFlowHandler(core_ce.ConfigSubentryFlow): - async def async_step_init(self, user_input=None): - schema = OrderedDict() - schema[vol.Required("enabled")] = bool - return self.async_show_form( - step_id="user", - data_schema=schema, - description_placeholders={"enabled": "Set to true to be true"}, - ) - - 
return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries(cls, config_entry): - return ("test",) - - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - MockConfigEntry( - domain="test", - entry_id="test1", - source="bla", - ).add_to_hass(hass) - entry = hass.config_entries.async_entries()[0] - - hass_admin_user.groups = [] - - with patch.dict(HANDLERS, {"test": TestFlow}): - resp = await getattr(client, method)(endpoint, json={"handler": entry.entry_id}) - - assert resp.status == HTTPStatus.UNAUTHORIZED - - -async def test_two_step_subentry_flow(hass: HomeAssistant, client) -> None: - """Test we can finish a two step subentry flow.""" - mock_integration( - hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True)) - ) - mock_platform(hass, "test.config_flow", None) - - class TestFlow(core_ce.ConfigFlow): - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - class SubentryFlowHandler(core_ce.ConfigSubentryFlow): - async def async_step_user(self, user_input=None): - return await self.async_step_finish() - - async def async_step_finish(self, user_input=None): - if user_input: - return self.async_create_entry( - title="Mock title", data=user_input, unique_id="test" - ) - - return self.async_show_form( - step_id="finish", data_schema=vol.Schema({"enabled": bool}) - ) - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries(cls, config_entry): - return ("test",) - - MockConfigEntry( - domain="test", - entry_id="test1", - source="bla", - ).add_to_hass(hass) - entry = hass.config_entries.async_entries()[0] - - with patch.dict(HANDLERS, {"test": TestFlow}): - url = "/api/config/config_entries/subentries/flow" - resp = await client.post(url, json={"handler": [entry.entry_id, "test"]}) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - flow_id = data["flow_id"] - expected_data = { - 
"data_schema": [{"name": "enabled", "type": "boolean"}], - "description_placeholders": None, - "errors": None, - "flow_id": flow_id, - "handler": ["test1", "test"], - "last_step": None, - "preview": None, - "step_id": "finish", - "type": "form", - } - assert data == expected_data - - resp = await client.get(f"/api/config/config_entries/subentries/flow/{flow_id}") - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == expected_data - - resp = await client.post( - f"/api/config/config_entries/subentries/flow/{flow_id}", - json={"enabled": True}, - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == { - "description_placeholders": None, - "description": None, - "flow_id": flow_id, - "handler": ["test1", "test"], - "title": "Mock title", - "type": "create_entry", - "unique_id": "test", - } - - -async def test_subentry_flow_with_invalid_data(hass: HomeAssistant, client) -> None: - """Test a subentry flow with invalid_data.""" - mock_integration( - hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True)) - ) - mock_platform(hass, "test.config_flow", None) - - class TestFlow(core_ce.ConfigFlow): - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - class SubentryFlowHandler(core_ce.ConfigSubentryFlow): - async def async_step_user(self, user_input=None): - return self.async_show_form( - step_id="finish", - data_schema=vol.Schema( - { - vol.Required( - "choices", default=["invalid", "valid"] - ): cv.multi_select({"valid": "Valid"}) - } - ), - ) - - async def async_step_finish(self, user_input=None): - return self.async_create_entry( - title="Enable disable", data=user_input - ) - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries(cls, config_entry): - return ("test",) - - MockConfigEntry( - domain="test", - entry_id="test1", - source="bla", - ).add_to_hass(hass) - entry = hass.config_entries.async_entries()[0] - - with 
patch.dict(HANDLERS, {"test": TestFlow}): - url = "/api/config/config_entries/subentries/flow" - resp = await client.post(url, json={"handler": [entry.entry_id, "test"]}) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - flow_id = data.pop("flow_id") - assert data == { - "type": "form", - "handler": ["test1", "test"], - "step_id": "finish", - "data_schema": [ - { - "default": ["invalid", "valid"], - "name": "choices", - "options": {"valid": "Valid"}, - "required": True, - "type": "multi_select", - } - ], - "description_placeholders": None, - "errors": None, - "last_step": None, - "preview": None, - } - - with patch.dict(HANDLERS, {"test": TestFlow}): - resp = await client.post( - f"/api/config/config_entries/subentries/flow/{flow_id}", - json={"choices": ["valid", "invalid"]}, - ) - assert resp.status == HTTPStatus.BAD_REQUEST - data = await resp.json() - assert data == {"errors": {"choices": "invalid is not a valid option"}} - - @pytest.mark.usefixtures("freezer") async def test_get_single( hass: HomeAssistant, hass_ws_client: WebSocketGenerator @@ -1403,13 +1120,11 @@ async def test_get_single( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "user", "state": "loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1765,13 +1480,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1786,13 +1499,11 @@ async def 
test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": "setup_error", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1807,13 +1518,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1828,13 +1537,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1849,13 +1556,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla5", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1881,13 +1586,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, 
"pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1912,13 +1615,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1933,13 +1634,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla5", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1964,13 +1663,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -1985,13 +1682,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2022,13 +1717,11 
@@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2043,13 +1736,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": "setup_error", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2064,13 +1755,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2085,13 +1774,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla4", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2106,13 +1793,11 @@ async def test_get_matching_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": timestamp, - "num_subentries": 0, "pref_disable_new_entities": False, 
"pref_disable_polling": False, "reason": None, "source": "bla5", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2215,13 +1900,11 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2239,13 +1922,11 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", "source": "bla2", "state": "setup_error", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2263,13 +1944,11 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2293,13 +1972,11 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2324,13 +2001,11 @@ 
async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2354,13 +2029,11 @@ async def test_subscribe_entries_ws( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": entry.modified_at.timestamp(), - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2446,13 +2119,11 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2470,13 +2141,11 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": created, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2502,13 +2171,11 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, - "num_subentries": 0, "pref_disable_new_entities": 
False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2530,13 +2197,11 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla3", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2562,13 +2227,11 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": modified, - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2592,13 +2255,11 @@ async def test_subscribe_entries_ws_filtered( "error_reason_translation_key": None, "error_reason_translation_placeholders": None, "modified_at": entry.modified_at.timestamp(), - "num_subentries": 0, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, "source": "bla", "state": "not_loaded", - "supported_subentries": [], "supports_options": False, "supports_reconfigure": False, "supports_remove_device": False, @@ -2809,133 +2470,3 @@ async def test_does_not_support_reconfigure( response == '{"message":"Handler ConfigEntriesFlowManager doesn\'t support step reconfigure"}' ) - - -async def test_list_subentries( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test that we can list subentries.""" - assert await async_setup_component(hass, "config", {}) - ws_client = await hass_ws_client(hass) - - 
entry = MockConfigEntry( - domain="test", - state=core_ce.ConfigEntryState.LOADED, - subentries_data=[ - core_ce.ConfigSubentryData( - data={"test": "test"}, - subentry_id="mock_id", - title="Mock title", - unique_id="test", - ) - ], - ) - entry.add_to_hass(hass) - - assert entry.pref_disable_new_entities is False - assert entry.pref_disable_polling is False - - await ws_client.send_json_auto_id( - { - "type": "config_entries/subentries/list", - "entry_id": entry.entry_id, - } - ) - response = await ws_client.receive_json() - - assert response["success"] - assert response["result"] == [ - {"subentry_id": "mock_id", "title": "Mock title", "unique_id": "test"}, - ] - - # Try listing subentries for an unknown entry - await ws_client.send_json_auto_id( - { - "type": "config_entries/subentries/list", - "entry_id": "no_such_entry", - } - ) - response = await ws_client.receive_json() - - assert not response["success"] - assert response["error"] == { - "code": "not_found", - "message": "Config entry not found", - } - - -async def test_delete_subentry( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test that we can delete a subentry.""" - assert await async_setup_component(hass, "config", {}) - ws_client = await hass_ws_client(hass) - - entry = MockConfigEntry( - domain="test", - state=core_ce.ConfigEntryState.LOADED, - subentries_data=[ - core_ce.ConfigSubentryData( - data={"test": "test"}, subentry_id="mock_id", title="Mock title" - ) - ], - ) - entry.add_to_hass(hass) - - assert entry.pref_disable_new_entities is False - assert entry.pref_disable_polling is False - - await ws_client.send_json_auto_id( - { - "type": "config_entries/subentries/delete", - "entry_id": entry.entry_id, - "subentry_id": "mock_id", - } - ) - response = await ws_client.receive_json() - - assert response["success"] - assert response["result"] is None - - await ws_client.send_json_auto_id( - { - "type": "config_entries/subentries/list", - "entry_id": entry.entry_id, - } 
- ) - response = await ws_client.receive_json() - - assert response["success"] - assert response["result"] == [] - - # Try deleting the subentry again - await ws_client.send_json_auto_id( - { - "type": "config_entries/subentries/delete", - "entry_id": entry.entry_id, - "subentry_id": "mock_id", - } - ) - response = await ws_client.receive_json() - - assert not response["success"] - assert response["error"] == { - "code": "not_found", - "message": "Config subentry not found", - } - - # Try deleting subentry from an unknown entry - await ws_client.send_json_auto_id( - { - "type": "config_entries/subentries/delete", - "entry_id": "no_such_entry", - "subentry_id": "mock_id", - } - ) - response = await ws_client.receive_json() - - assert not response["success"] - assert response["error"] == { - "code": "not_found", - "message": "Config entry not found", - } diff --git a/tests/components/deconz/snapshots/test_diagnostics.ambr b/tests/components/deconz/snapshots/test_diagnostics.ambr index 20558b4bbbd..1ca674a4fbe 100644 --- a/tests/components/deconz/snapshots/test_diagnostics.ambr +++ b/tests/components/deconz/snapshots/test_diagnostics.ambr @@ -21,8 +21,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr b/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr index 0e507ca0b28..abedc128756 100644 --- a/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr +++ b/tests/components/devolo_home_control/snapshots/test_diagnostics.ambr @@ -47,8 +47,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '123456', 'version': 1, diff --git a/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr 
b/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr index 1288b7f3ef6..53940bf5119 100644 --- a/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr +++ b/tests/components/devolo_home_network/snapshots/test_diagnostics.ambr @@ -32,8 +32,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '1234567890', 'version': 1, diff --git a/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr b/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr index 0a46dd7f476..d407fe2dc5b 100644 --- a/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr +++ b/tests/components/dsmr_reader/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'dsmr_reader', 'unique_id': 'UNIQUE_TEST_ID', 'version': 1, diff --git a/tests/components/ecovacs/snapshots/test_diagnostics.ambr b/tests/components/ecovacs/snapshots/test_diagnostics.ambr index f9540e06038..38c8a9a5ab9 100644 --- a/tests/components/ecovacs/snapshots/test_diagnostics.ambr +++ b/tests/components/ecovacs/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, @@ -72,8 +70,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, diff --git a/tests/components/energyzero/snapshots/test_config_flow.ambr b/tests/components/energyzero/snapshots/test_config_flow.ambr index 88b0af6dc7b..72e504c97c8 100644 --- a/tests/components/energyzero/snapshots/test_config_flow.ambr +++ b/tests/components/energyzero/snapshots/test_config_flow.ambr @@ -28,14 +28,10 @@ 'pref_disable_new_entities': False, 
'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'EnergyZero', 'unique_id': 'energyzero', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'EnergyZero', 'type': , 'version': 1, diff --git a/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr b/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr index 3cacd3a8518..76835098f27 100644 --- a/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr +++ b/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr @@ -20,8 +20,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, @@ -456,8 +454,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, @@ -932,8 +928,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/esphome/snapshots/test_diagnostics.ambr b/tests/components/esphome/snapshots/test_diagnostics.ambr index 8f1711e829e..4f7ea679b20 100644 --- a/tests/components/esphome/snapshots/test_diagnostics.ambr +++ b/tests/components/esphome/snapshots/test_diagnostics.ambr @@ -20,8 +20,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'ESPHome Device', 'unique_id': '11:22:33:44:55:aa', 'version': 1, diff --git a/tests/components/esphome/test_diagnostics.py b/tests/components/esphome/test_diagnostics.py index 0beeae71df3..832e7d6572f 100644 --- a/tests/components/esphome/test_diagnostics.py +++ b/tests/components/esphome/test_diagnostics.py @@ -79,7 +79,6 @@ async def test_diagnostics_with_bluetooth( "pref_disable_new_entities": False, 
"pref_disable_polling": False, "source": "user", - "subentries": [], "title": "Mock Title", "unique_id": "11:22:33:44:55:aa", "version": 1, diff --git a/tests/components/forecast_solar/snapshots/test_init.ambr b/tests/components/forecast_solar/snapshots/test_init.ambr index c0db54c2d4e..6ae4c2f6198 100644 --- a/tests/components/forecast_solar/snapshots/test_init.ambr +++ b/tests/components/forecast_solar/snapshots/test_init.ambr @@ -23,8 +23,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Green House', 'unique_id': 'unique', 'version': 2, diff --git a/tests/components/fritz/snapshots/test_diagnostics.ambr b/tests/components/fritz/snapshots/test_diagnostics.ambr index 9b5b8c9353a..53f7093a21b 100644 --- a/tests/components/fritz/snapshots/test_diagnostics.ambr +++ b/tests/components/fritz/snapshots/test_diagnostics.ambr @@ -61,8 +61,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/fronius/snapshots/test_diagnostics.ambr b/tests/components/fronius/snapshots/test_diagnostics.ambr index b112839835a..010de06e276 100644 --- a/tests/components/fronius/snapshots/test_diagnostics.ambr +++ b/tests/components/fronius/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/fyta/snapshots/test_diagnostics.ambr b/tests/components/fyta/snapshots/test_diagnostics.ambr index f1792cb7535..eb19797e5b1 100644 --- a/tests/components/fyta/snapshots/test_diagnostics.ambr +++ b/tests/components/fyta/snapshots/test_diagnostics.ambr @@ -19,8 +19,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - 
]), 'title': 'fyta_user', 'unique_id': None, 'version': 1, diff --git a/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr b/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr index 10f23759fae..6d521b1f2c8 100644 --- a/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr +++ b/tests/components/gardena_bluetooth/snapshots/test_config_flow.ambr @@ -66,14 +66,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'bluetooth', - 'subentries': list([ - ]), 'title': 'Gardena Water Computer', 'unique_id': '00000000-0000-0000-0000-000000000001', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Gardena Water Computer', 'type': , 'version': 1, @@ -227,14 +223,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Gardena Water Computer', 'unique_id': '00000000-0000-0000-0000-000000000001', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Gardena Water Computer', 'type': , 'version': 1, diff --git a/tests/components/gios/snapshots/test_diagnostics.ambr b/tests/components/gios/snapshots/test_diagnostics.ambr index 890edc00482..71e0afdc495 100644 --- a/tests/components/gios/snapshots/test_diagnostics.ambr +++ b/tests/components/gios/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Home', 'unique_id': '123', 'version': 1, diff --git a/tests/components/goodwe/snapshots/test_diagnostics.ambr b/tests/components/goodwe/snapshots/test_diagnostics.ambr index 40ed22195d5..f52e47688e8 100644 --- a/tests/components/goodwe/snapshots/test_diagnostics.ambr +++ b/tests/components/goodwe/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff 
--git a/tests/components/google_assistant/snapshots/test_diagnostics.ambr b/tests/components/google_assistant/snapshots/test_diagnostics.ambr index 1ecedbd1173..edbbdb1ba28 100644 --- a/tests/components/google_assistant/snapshots/test_diagnostics.ambr +++ b/tests/components/google_assistant/snapshots/test_diagnostics.ambr @@ -15,8 +15,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'import', - 'subentries': list([ - ]), 'title': '1234', 'unique_id': '1234', 'version': 1, diff --git a/tests/components/guardian/test_diagnostics.py b/tests/components/guardian/test_diagnostics.py index 4487d0b6ac6..faba2103000 100644 --- a/tests/components/guardian/test_diagnostics.py +++ b/tests/components/guardian/test_diagnostics.py @@ -42,7 +42,6 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, - "subentries": [], }, "data": { "valve_controller": { diff --git a/tests/components/homewizard/snapshots/test_config_flow.ambr b/tests/components/homewizard/snapshots/test_config_flow.ambr index 71e70f3a153..0a301fc3941 100644 --- a/tests/components/homewizard/snapshots/test_config_flow.ambr +++ b/tests/components/homewizard/snapshots/test_config_flow.ambr @@ -30,14 +30,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'zeroconf', - 'subentries': list([ - ]), 'title': 'P1 meter', 'unique_id': 'HWE-P1_5c2fafabcdef', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'P1 meter', 'type': , 'version': 1, @@ -78,14 +74,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'zeroconf', - 'subentries': list([ - ]), 'title': 'P1 meter', 'unique_id': 'HWE-P1_5c2fafabcdef', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'P1 meter', 'type': , 'version': 1, @@ -126,14 +118,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'zeroconf', - 'subentries': list([ - ]), 'title': 'Energy Socket', 'unique_id': 
'HWE-SKT_5c2fafabcdef', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Energy Socket', 'type': , 'version': 1, @@ -170,14 +158,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'P1 meter', 'unique_id': 'HWE-P1_5c2fafabcdef', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'P1 meter', 'type': , 'version': 1, diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index 2dab82451a6..a4dc986c2f9 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -183,8 +183,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Husqvarna Automower of Erika Mustermann', 'unique_id': '123', 'version': 1, diff --git a/tests/components/imgw_pib/snapshots/test_diagnostics.ambr b/tests/components/imgw_pib/snapshots/test_diagnostics.ambr index f15fc706d7e..494980ba4ce 100644 --- a/tests/components/imgw_pib/snapshots/test_diagnostics.ambr +++ b/tests/components/imgw_pib/snapshots/test_diagnostics.ambr @@ -15,8 +15,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'River Name (Station Name)', 'unique_id': '123', 'version': 1, diff --git a/tests/components/iqvia/snapshots/test_diagnostics.ambr b/tests/components/iqvia/snapshots/test_diagnostics.ambr index 41cfedb0e29..f2fa656cb0f 100644 --- a/tests/components/iqvia/snapshots/test_diagnostics.ambr +++ b/tests/components/iqvia/snapshots/test_diagnostics.ambr @@ -358,8 +358,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git 
a/tests/components/kostal_plenticore/test_diagnostics.py b/tests/components/kostal_plenticore/test_diagnostics.py index 3a99a7f681d..08f06684d9a 100644 --- a/tests/components/kostal_plenticore/test_diagnostics.py +++ b/tests/components/kostal_plenticore/test_diagnostics.py @@ -57,7 +57,6 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, - "subentries": [], }, "client": { "version": "api_version='0.2.0' hostname='scb' name='PUCK RESTful API' sw_version='01.16.05025'", diff --git a/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr b/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr index 640726e2355..201bbbc971e 100644 --- a/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr +++ b/tests/components/lacrosse_view/snapshots/test_diagnostics.ambr @@ -25,8 +25,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr b/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr index db82f41eb73..c689d04949a 100644 --- a/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr +++ b/tests/components/linear_garage_door/snapshots/test_diagnostics.ambr @@ -73,8 +73,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'test-site-name', 'unique_id': None, 'version': 1, diff --git a/tests/components/madvr/snapshots/test_diagnostics.ambr b/tests/components/madvr/snapshots/test_diagnostics.ambr index 92d0578dba8..3a281391860 100644 --- a/tests/components/madvr/snapshots/test_diagnostics.ambr +++ b/tests/components/madvr/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'envy', 
'unique_id': '00:11:22:33:44:55', 'version': 1, diff --git a/tests/components/melcloud/snapshots/test_diagnostics.ambr b/tests/components/melcloud/snapshots/test_diagnostics.ambr index 671f5afcc52..e6a432de07e 100644 --- a/tests/components/melcloud/snapshots/test_diagnostics.ambr +++ b/tests/components/melcloud/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'melcloud', 'unique_id': 'UNIQUE_TEST_ID', 'version': 1, diff --git a/tests/components/modern_forms/snapshots/test_diagnostics.ambr b/tests/components/modern_forms/snapshots/test_diagnostics.ambr index 1b4090ca5a4..f8897a4a47f 100644 --- a/tests/components/modern_forms/snapshots/test_diagnostics.ambr +++ b/tests/components/modern_forms/snapshots/test_diagnostics.ambr @@ -16,8 +16,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': 'AA:BB:CC:DD:EE:FF', 'version': 1, diff --git a/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr b/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr index d042dc02ac3..5b4b169c0fe 100644 --- a/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr +++ b/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr @@ -28,8 +28,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/netatmo/snapshots/test_diagnostics.ambr b/tests/components/netatmo/snapshots/test_diagnostics.ambr index 4ea7e30bcf9..463556ec657 100644 --- a/tests/components/netatmo/snapshots/test_diagnostics.ambr +++ b/tests/components/netatmo/snapshots/test_diagnostics.ambr @@ -646,8 +646,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 
'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': 'netatmo', 'version': 1, diff --git a/tests/components/nextdns/snapshots/test_diagnostics.ambr b/tests/components/nextdns/snapshots/test_diagnostics.ambr index 23f42fee077..827d6aeb6e5 100644 --- a/tests/components/nextdns/snapshots/test_diagnostics.ambr +++ b/tests/components/nextdns/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Fake Profile', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/nice_go/snapshots/test_diagnostics.ambr b/tests/components/nice_go/snapshots/test_diagnostics.ambr index b33726d2b72..f4ba363a421 100644 --- a/tests/components/nice_go/snapshots/test_diagnostics.ambr +++ b/tests/components/nice_go/snapshots/test_diagnostics.ambr @@ -60,8 +60,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/notion/test_diagnostics.py b/tests/components/notion/test_diagnostics.py index c1d1bd1bb2e..890ce2dfc4a 100644 --- a/tests/components/notion/test_diagnostics.py +++ b/tests/components/notion/test_diagnostics.py @@ -37,7 +37,6 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, - "subentries": [], }, "data": { "bridges": [ diff --git a/tests/components/onvif/snapshots/test_diagnostics.ambr b/tests/components/onvif/snapshots/test_diagnostics.ambr index c3938efcbb6..c8a9ff75d62 100644 --- a/tests/components/onvif/snapshots/test_diagnostics.ambr +++ b/tests/components/onvif/snapshots/test_diagnostics.ambr @@ -24,8 +24,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': 'aa:bb:cc:dd:ee:ff', 'version': 1, diff --git 
a/tests/components/openuv/test_diagnostics.py b/tests/components/openuv/test_diagnostics.py index 03b392b3e7b..61b68b5ad90 100644 --- a/tests/components/openuv/test_diagnostics.py +++ b/tests/components/openuv/test_diagnostics.py @@ -39,7 +39,6 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, - "subentries": [], }, "data": { "protection_window": { diff --git a/tests/components/p1_monitor/snapshots/test_init.ambr b/tests/components/p1_monitor/snapshots/test_init.ambr index 83684e153c9..d0a676fce1b 100644 --- a/tests/components/p1_monitor/snapshots/test_init.ambr +++ b/tests/components/p1_monitor/snapshots/test_init.ambr @@ -16,8 +16,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': 'unique_thingy', 'version': 2, @@ -40,8 +38,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': 'unique_thingy', 'version': 2, diff --git a/tests/components/pegel_online/snapshots/test_diagnostics.ambr b/tests/components/pegel_online/snapshots/test_diagnostics.ambr index d0fdc81acb4..1e55805f867 100644 --- a/tests/components/pegel_online/snapshots/test_diagnostics.ambr +++ b/tests/components/pegel_online/snapshots/test_diagnostics.ambr @@ -31,8 +31,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '70272185-xxxx-xxxx-xxxx-43bea330dcae', 'version': 1, diff --git a/tests/components/philips_js/snapshots/test_diagnostics.ambr b/tests/components/philips_js/snapshots/test_diagnostics.ambr index 53db95f0534..4f7a6176634 100644 --- a/tests/components/philips_js/snapshots/test_diagnostics.ambr +++ b/tests/components/philips_js/snapshots/test_diagnostics.ambr @@ -94,8 +94,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 
'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/philips_js/test_config_flow.py b/tests/components/philips_js/test_config_flow.py index 4b8048a8ebe..80d05961813 100644 --- a/tests/components/philips_js/test_config_flow.py +++ b/tests/components/philips_js/test_config_flow.py @@ -155,7 +155,6 @@ async def test_pairing(hass: HomeAssistant, mock_tv_pairable, mock_setup_entry) "version": 1, "options": {}, "minor_version": 1, - "subentries": (), } await hass.async_block_till_done() diff --git a/tests/components/pi_hole/snapshots/test_diagnostics.ambr b/tests/components/pi_hole/snapshots/test_diagnostics.ambr index 2d6f6687d04..3094fcef24b 100644 --- a/tests/components/pi_hole/snapshots/test_diagnostics.ambr +++ b/tests/components/pi_hole/snapshots/test_diagnostics.ambr @@ -33,8 +33,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/proximity/snapshots/test_diagnostics.ambr b/tests/components/proximity/snapshots/test_diagnostics.ambr index 42ec74710f9..3d9673ffd90 100644 --- a/tests/components/proximity/snapshots/test_diagnostics.ambr +++ b/tests/components/proximity/snapshots/test_diagnostics.ambr @@ -102,8 +102,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'home', 'unique_id': 'proximity_home', 'version': 1, diff --git a/tests/components/ps4/test_init.py b/tests/components/ps4/test_init.py index 24d45fee5b9..d14f367b2bd 100644 --- a/tests/components/ps4/test_init.py +++ b/tests/components/ps4/test_init.py @@ -52,7 +52,6 @@ MOCK_FLOW_RESULT = { "title": "test_ps4", "data": MOCK_DATA, "options": {}, - "subentries": (), } MOCK_ENTRY_ID = "SomeID" diff --git a/tests/components/purpleair/test_diagnostics.py 
b/tests/components/purpleair/test_diagnostics.py index 6271a63d652..ae4b28567be 100644 --- a/tests/components/purpleair/test_diagnostics.py +++ b/tests/components/purpleair/test_diagnostics.py @@ -38,7 +38,6 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, - "subentries": [], }, "data": { "fields": [ diff --git a/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr b/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr index abf8e380916..e131bf3d952 100644 --- a/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr +++ b/tests/components/rainforest_raven/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, @@ -86,8 +84,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/rainmachine/snapshots/test_diagnostics.ambr b/tests/components/rainmachine/snapshots/test_diagnostics.ambr index 681805996f1..acd5fd165b4 100644 --- a/tests/components/rainmachine/snapshots/test_diagnostics.ambr +++ b/tests/components/rainmachine/snapshots/test_diagnostics.ambr @@ -1144,8 +1144,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 2, @@ -2277,8 +2275,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/recollect_waste/test_diagnostics.py b/tests/components/recollect_waste/test_diagnostics.py index a57e289ec04..24c690bcb37 100644 --- a/tests/components/recollect_waste/test_diagnostics.py +++ 
b/tests/components/recollect_waste/test_diagnostics.py @@ -34,7 +34,6 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, - "subentries": [], }, "data": [ { diff --git a/tests/components/ridwell/snapshots/test_diagnostics.ambr b/tests/components/ridwell/snapshots/test_diagnostics.ambr index 4b4dda7227d..b03d87c7a89 100644 --- a/tests/components/ridwell/snapshots/test_diagnostics.ambr +++ b/tests/components/ridwell/snapshots/test_diagnostics.ambr @@ -44,8 +44,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 2, diff --git a/tests/components/samsungtv/test_diagnostics.py b/tests/components/samsungtv/test_diagnostics.py index e8e0b699a7e..0319d5dd8dd 100644 --- a/tests/components/samsungtv/test_diagnostics.py +++ b/tests/components/samsungtv/test_diagnostics.py @@ -51,7 +51,6 @@ async def test_entry_diagnostics( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "user", - "subentries": [], "title": "Mock Title", "unique_id": "any", "version": 2, @@ -92,7 +91,6 @@ async def test_entry_diagnostics_encrypted( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "user", - "subentries": [], "title": "Mock Title", "unique_id": "any", "version": 2, @@ -132,7 +130,6 @@ async def test_entry_diagnostics_encrypte_offline( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "user", - "subentries": [], "title": "Mock Title", "unique_id": "any", "version": 2, diff --git a/tests/components/screenlogic/snapshots/test_diagnostics.ambr b/tests/components/screenlogic/snapshots/test_diagnostics.ambr index c7db7a33959..237d3eab257 100644 --- a/tests/components/screenlogic/snapshots/test_diagnostics.ambr +++ b/tests/components/screenlogic/snapshots/test_diagnostics.ambr @@ -18,8 +18,6 @@ 'pref_disable_new_entities': False, 
'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Pentair: DD-EE-FF', 'unique_id': 'aa:bb:cc:dd:ee:ff', 'version': 1, diff --git a/tests/components/simplisafe/test_diagnostics.py b/tests/components/simplisafe/test_diagnostics.py index 13c1e28aa36..d5479f00b06 100644 --- a/tests/components/simplisafe/test_diagnostics.py +++ b/tests/components/simplisafe/test_diagnostics.py @@ -32,7 +32,6 @@ async def test_entry_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, - "subentries": [], }, "subscription_data": { "12345": { diff --git a/tests/components/solarlog/snapshots/test_diagnostics.ambr b/tests/components/solarlog/snapshots/test_diagnostics.ambr index 6aef72ebbd5..e0f1bc2623c 100644 --- a/tests/components/solarlog/snapshots/test_diagnostics.ambr +++ b/tests/components/solarlog/snapshots/test_diagnostics.ambr @@ -18,8 +18,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'solarlog', 'unique_id': None, 'version': 1, diff --git a/tests/components/subaru/test_config_flow.py b/tests/components/subaru/test_config_flow.py index 0b45546902b..6abc544c92a 100644 --- a/tests/components/subaru/test_config_flow.py +++ b/tests/components/subaru/test_config_flow.py @@ -136,7 +136,6 @@ async def test_user_form_pin_not_required( "data": deepcopy(TEST_CONFIG), "options": {}, "minor_version": 1, - "subentries": (), } expected["data"][CONF_PIN] = None @@ -342,7 +341,6 @@ async def test_pin_form_success(hass: HomeAssistant, pin_form) -> None: "data": TEST_CONFIG, "options": {}, "minor_version": 1, - "subentries": (), } result["data"][CONF_DEVICE_ID] = TEST_DEVICE_ID assert result == expected diff --git a/tests/components/switcher_kis/test_diagnostics.py b/tests/components/switcher_kis/test_diagnostics.py index f59958420c4..53572085f9b 100644 --- a/tests/components/switcher_kis/test_diagnostics.py +++ 
b/tests/components/switcher_kis/test_diagnostics.py @@ -69,6 +69,5 @@ async def test_diagnostics( "created_at": ANY, "modified_at": ANY, "discovery_keys": {}, - "subentries": [], }, } diff --git a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr index afa508cc004..75d942fc601 100644 --- a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr +++ b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr @@ -56,8 +56,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'System Monitor', 'unique_id': None, 'version': 1, @@ -113,8 +111,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'System Monitor', 'unique_id': None, 'version': 1, diff --git a/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr b/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr index b5b33d7c246..3180c7c0b1d 100644 --- a/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr +++ b/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr @@ -37,8 +37,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/tractive/snapshots/test_diagnostics.ambr b/tests/components/tractive/snapshots/test_diagnostics.ambr index 3613f7e5997..11427a84801 100644 --- a/tests/components/tractive/snapshots/test_diagnostics.ambr +++ b/tests/components/tractive/snapshots/test_diagnostics.ambr @@ -17,8 +17,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': 'very_unique_string', 'version': 1, diff --git a/tests/components/tuya/snapshots/test_config_flow.ambr 
b/tests/components/tuya/snapshots/test_config_flow.ambr index 90d83d69814..a5a68a12a22 100644 --- a/tests/components/tuya/snapshots/test_config_flow.ambr +++ b/tests/components/tuya/snapshots/test_config_flow.ambr @@ -24,8 +24,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '12345', 'unique_id': '12345', 'version': 1, @@ -56,8 +54,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Old Tuya configuration entry', 'unique_id': '12345', 'version': 1, @@ -111,14 +107,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'mocked_username', 'unique_id': None, 'version': 1, }), - 'subentries': tuple( - ), 'title': 'mocked_username', 'type': , 'version': 1, diff --git a/tests/components/twinkly/snapshots/test_diagnostics.ambr b/tests/components/twinkly/snapshots/test_diagnostics.ambr index e52f76634fd..28ec98cf572 100644 --- a/tests/components/twinkly/snapshots/test_diagnostics.ambr +++ b/tests/components/twinkly/snapshots/test_diagnostics.ambr @@ -37,8 +37,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Twinkly', 'unique_id': '4c8fccf5-e08a-4173-92d5-49bf479252a2', 'version': 1, diff --git a/tests/components/unifi/snapshots/test_diagnostics.ambr b/tests/components/unifi/snapshots/test_diagnostics.ambr index aa7337be0ba..4ba90a00113 100644 --- a/tests/components/unifi/snapshots/test_diagnostics.ambr +++ b/tests/components/unifi/snapshots/test_diagnostics.ambr @@ -42,8 +42,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': '1', 'version': 1, diff --git a/tests/components/uptime/snapshots/test_config_flow.ambr 
b/tests/components/uptime/snapshots/test_config_flow.ambr index 93b1da60998..38312667375 100644 --- a/tests/components/uptime/snapshots/test_config_flow.ambr +++ b/tests/components/uptime/snapshots/test_config_flow.ambr @@ -27,14 +27,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Uptime', 'unique_id': None, 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Uptime', 'type': , 'version': 1, diff --git a/tests/components/utility_meter/snapshots/test_diagnostics.ambr b/tests/components/utility_meter/snapshots/test_diagnostics.ambr index ef235bba99d..6cdf121d7e3 100644 --- a/tests/components/utility_meter/snapshots/test_diagnostics.ambr +++ b/tests/components/utility_meter/snapshots/test_diagnostics.ambr @@ -25,8 +25,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Energy Bill', 'unique_id': None, 'version': 2, diff --git a/tests/components/v2c/snapshots/test_diagnostics.ambr b/tests/components/v2c/snapshots/test_diagnostics.ambr index 780a00acd64..96567b80c54 100644 --- a/tests/components/v2c/snapshots/test_diagnostics.ambr +++ b/tests/components/v2c/snapshots/test_diagnostics.ambr @@ -16,8 +16,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': 'ABC123', 'version': 1, diff --git a/tests/components/vicare/snapshots/test_diagnostics.ambr b/tests/components/vicare/snapshots/test_diagnostics.ambr index 0b1dcef5a29..ae9b05389c7 100644 --- a/tests/components/vicare/snapshots/test_diagnostics.ambr +++ b/tests/components/vicare/snapshots/test_diagnostics.ambr @@ -4731,8 +4731,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': 'ViCare', 'version': 1, diff --git 
a/tests/components/vodafone_station/snapshots/test_diagnostics.ambr b/tests/components/vodafone_station/snapshots/test_diagnostics.ambr index dd268f4ed1a..c258b14dc2d 100644 --- a/tests/components/vodafone_station/snapshots/test_diagnostics.ambr +++ b/tests/components/vodafone_station/snapshots/test_diagnostics.ambr @@ -35,8 +35,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/watttime/snapshots/test_diagnostics.ambr b/tests/components/watttime/snapshots/test_diagnostics.ambr index 3cc5e1d6f66..0c137acc36b 100644 --- a/tests/components/watttime/snapshots/test_diagnostics.ambr +++ b/tests/components/watttime/snapshots/test_diagnostics.ambr @@ -27,8 +27,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': '**REDACTED**', 'version': 1, diff --git a/tests/components/webmin/snapshots/test_diagnostics.ambr b/tests/components/webmin/snapshots/test_diagnostics.ambr index c64fa212a98..8299b0eafba 100644 --- a/tests/components/webmin/snapshots/test_diagnostics.ambr +++ b/tests/components/webmin/snapshots/test_diagnostics.ambr @@ -253,8 +253,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': '**REDACTED**', 'unique_id': None, 'version': 1, diff --git a/tests/components/webostv/test_diagnostics.py b/tests/components/webostv/test_diagnostics.py index 7f54e940966..3d7cb00e021 100644 --- a/tests/components/webostv/test_diagnostics.py +++ b/tests/components/webostv/test_diagnostics.py @@ -61,6 +61,5 @@ async def test_diagnostics( "created_at": entry.created_at.isoformat(), "modified_at": entry.modified_at.isoformat(), "discovery_keys": {}, - "subentries": [], }, } diff --git a/tests/components/whirlpool/snapshots/test_diagnostics.ambr 
b/tests/components/whirlpool/snapshots/test_diagnostics.ambr index ee8abe04bf1..c60ce17b952 100644 --- a/tests/components/whirlpool/snapshots/test_diagnostics.ambr +++ b/tests/components/whirlpool/snapshots/test_diagnostics.ambr @@ -38,8 +38,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/whois/snapshots/test_config_flow.ambr b/tests/components/whois/snapshots/test_config_flow.ambr index 0d99b0596e3..937502d4d6c 100644 --- a/tests/components/whois/snapshots/test_config_flow.ambr +++ b/tests/components/whois/snapshots/test_config_flow.ambr @@ -30,14 +30,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Example.com', 'type': , 'version': 1, @@ -74,14 +70,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Example.com', 'type': , 'version': 1, @@ -118,14 +110,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Example.com', 'type': , 'version': 1, @@ -162,14 +150,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Example.com', 'unique_id': 'example.com', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Example.com', 'type': , 'version': 1, @@ -206,14 +190,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Example.com', 'unique_id': 
'example.com', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Example.com', 'type': , 'version': 1, diff --git a/tests/components/workday/snapshots/test_diagnostics.ambr b/tests/components/workday/snapshots/test_diagnostics.ambr index e7331b911a8..f41b86b7f6d 100644 --- a/tests/components/workday/snapshots/test_diagnostics.ambr +++ b/tests/components/workday/snapshots/test_diagnostics.ambr @@ -40,8 +40,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/components/wyoming/snapshots/test_config_flow.ambr b/tests/components/wyoming/snapshots/test_config_flow.ambr index d288c531407..bdead0f2028 100644 --- a/tests/components/wyoming/snapshots/test_config_flow.ambr +++ b/tests/components/wyoming/snapshots/test_config_flow.ambr @@ -36,14 +36,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'hassio', - 'subentries': list([ - ]), 'title': 'Piper', 'unique_id': '1234', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Piper', 'type': , 'version': 1, @@ -86,14 +82,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'hassio', - 'subentries': list([ - ]), 'title': 'Piper', 'unique_id': '1234', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Piper', 'type': , 'version': 1, @@ -135,14 +127,10 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'zeroconf', - 'subentries': list([ - ]), 'title': 'Test Satellite', 'unique_id': 'test_zeroconf_name._wyoming._tcp.local._Test Satellite', 'version': 1, }), - 'subentries': tuple( - ), 'title': 'Test Satellite', 'type': , 'version': 1, diff --git a/tests/components/zha/snapshots/test_diagnostics.ambr b/tests/components/zha/snapshots/test_diagnostics.ambr index 08807f65d5d..f46a06e84b8 100644 --- a/tests/components/zha/snapshots/test_diagnostics.ambr +++ 
b/tests/components/zha/snapshots/test_diagnostics.ambr @@ -113,8 +113,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 4, diff --git a/tests/snapshots/test_config_entries.ambr b/tests/snapshots/test_config_entries.ambr index 08b532677f4..51e56f4874e 100644 --- a/tests/snapshots/test_config_entries.ambr +++ b/tests/snapshots/test_config_entries.ambr @@ -16,8 +16,6 @@ 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', - 'subentries': list([ - ]), 'title': 'Mock Title', 'unique_id': None, 'version': 1, diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index 1ad152e8e42..aba85a35349 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -4,7 +4,6 @@ from __future__ import annotations import asyncio from collections.abc import Generator -from contextlib import AbstractContextManager, nullcontext as does_not_raise from datetime import timedelta import logging import re @@ -906,7 +905,7 @@ async def test_entries_excludes_ignore_and_disabled( async def test_saving_and_loading( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, hass_storage: dict[str, Any] + hass: HomeAssistant, freezer: FrozenDateTimeFactory ) -> None: """Test that we're saving and loading correctly.""" mock_integration( @@ -923,17 +922,7 @@ async def test_saving_and_loading( async def async_step_user(self, user_input=None): """Test user step.""" await self.async_set_unique_id("unique") - subentries = [ - config_entries.ConfigSubentryData( - data={"foo": "bar"}, title="subentry 1" - ), - config_entries.ConfigSubentryData( - data={"sun": "moon"}, title="subentry 2", unique_id="very_unique" - ), - ] - return self.async_create_entry( - title="Test Title", data={"token": "abcd"}, subentries=subentries - ) + return self.async_create_entry(title="Test Title", data={"token": "abcd"}) with 
mock_config_flow("test", TestFlow): await hass.config_entries.flow.async_init( @@ -982,98 +971,6 @@ async def test_saving_and_loading( # To execute the save await hass.async_block_till_done() - stored_data = hass_storage["core.config_entries"] - assert stored_data == { - "data": { - "entries": [ - { - "created_at": ANY, - "data": { - "token": "abcd", - }, - "disabled_by": None, - "discovery_keys": {}, - "domain": "test", - "entry_id": ANY, - "minor_version": 1, - "modified_at": ANY, - "options": {}, - "pref_disable_new_entities": True, - "pref_disable_polling": True, - "source": "user", - "subentries": [ - { - "data": {"foo": "bar"}, - "subentry_id": ANY, - "title": "subentry 1", - "unique_id": None, - }, - { - "data": {"sun": "moon"}, - "subentry_id": ANY, - "title": "subentry 2", - "unique_id": "very_unique", - }, - ], - "title": "Test Title", - "unique_id": "unique", - "version": 5, - }, - { - "created_at": ANY, - "data": { - "username": "bla", - }, - "disabled_by": None, - "discovery_keys": { - "test": [ - {"domain": "test", "key": "blah", "version": 1}, - ], - }, - "domain": "test", - "entry_id": ANY, - "minor_version": 1, - "modified_at": ANY, - "options": {}, - "pref_disable_new_entities": False, - "pref_disable_polling": False, - "source": "user", - "subentries": [], - "title": "Test 2 Title", - "unique_id": None, - "version": 3, - }, - { - "created_at": ANY, - "data": { - "username": "bla", - }, - "disabled_by": None, - "discovery_keys": { - "test": [ - {"domain": "test", "key": ["a", "b"], "version": 1}, - ], - }, - "domain": "test", - "entry_id": ANY, - "minor_version": 1, - "modified_at": ANY, - "options": {}, - "pref_disable_new_entities": False, - "pref_disable_polling": False, - "source": "user", - "subentries": [], - "title": "Test 2 Title", - "unique_id": None, - "version": 3, - }, - ], - }, - "key": "core.config_entries", - "minor_version": 5, - "version": 1, - } - # Now load written data in new config manager manager = 
config_entries.ConfigEntries(hass, {}) await manager.async_initialize() @@ -1086,25 +983,6 @@ async def test_saving_and_loading( ): assert orig.as_dict() == loaded.as_dict() - hass.config_entries.async_update_entry( - entry_1, - pref_disable_polling=False, - pref_disable_new_entities=False, - ) - - # To trigger the call_later - freezer.tick(1.0) - async_fire_time_changed(hass) - # To execute the save - await hass.async_block_till_done() - - # Assert no data is lost when storing again - expected_stored_data = stored_data - expected_stored_data["data"]["entries"][0]["modified_at"] = ANY - expected_stored_data["data"]["entries"][0]["pref_disable_new_entities"] = False - expected_stored_data["data"]["entries"][0]["pref_disable_polling"] = False - assert hass_storage["core.config_entries"] == expected_stored_data | {} - @freeze_time("2024-02-14 12:00:00") async def test_as_dict(snapshot: SnapshotAssertion) -> None: @@ -1538,42 +1416,6 @@ async def test_update_entry_options_and_trigger_listener( assert len(update_listener_calls) == 1 -async def test_update_subentry_and_trigger_listener( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test that we can update subentry and trigger listener.""" - entry = MockConfigEntry(domain="test", options={"first": True}) - entry.add_to_manager(manager) - update_listener_calls = [] - - subentry = config_entries.ConfigSubentry( - data={"test": "test"}, unique_id="test", title="Mock title" - ) - - async def update_listener( - hass: HomeAssistant, entry: config_entries.ConfigEntry - ) -> None: - """Test function.""" - assert entry.subentries == expected_subentries - update_listener_calls.append(None) - - entry.add_update_listener(update_listener) - - expected_subentries = {subentry.subentry_id: subentry} - assert manager.async_add_subentry(entry, subentry) is True - - await hass.async_block_till_done(wait_background_tasks=True) - assert entry.subentries == expected_subentries - assert len(update_listener_calls) 
== 1 - - expected_subentries = {} - assert manager.async_remove_subentry(entry, subentry.subentry_id) is True - - await hass.async_block_till_done(wait_background_tasks=True) - assert entry.subentries == expected_subentries - assert len(update_listener_calls) == 2 - - async def test_setup_raise_not_ready( hass: HomeAssistant, manager: config_entries.ConfigEntries, @@ -1900,456 +1742,20 @@ async def test_entry_options_unknown_config_entry( mock_integration(hass, MockModule("test")) mock_platform(hass, "test.config_flow", None) + class TestFlow: + """Test flow.""" + + @staticmethod + @callback + def async_get_options_flow(config_entry): + """Test options flow.""" + with pytest.raises(config_entries.UnknownEntry): await manager.options.async_create_flow( "blah", context={"source": "test"}, data=None ) -async def test_create_entry_subentries( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test a config entry being created with subentries.""" - - subentrydata = config_entries.ConfigSubentryData( - data={"test": "test"}, - title="Mock title", - unique_id="test", - ) - - async def mock_async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Mock setup.""" - hass.async_create_task( - hass.config_entries.flow.async_init( - "comp", - context={"source": config_entries.SOURCE_IMPORT}, - data={"data": "data", "subentry": subentrydata}, - ) - ) - return True - - async_setup_entry = AsyncMock(return_value=True) - mock_integration( - hass, - MockModule( - "comp", async_setup=mock_async_setup, async_setup_entry=async_setup_entry - ), - ) - mock_platform(hass, "comp.config_flow", None) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - VERSION = 1 - - async def async_step_import(self, user_input): - """Test import step creating entry, with subentry.""" - return self.async_create_entry( - title="title", - data={"example": user_input["data"]}, - subentries=[user_input["subentry"]], - ) - - with 
patch.dict(config_entries.HANDLERS, {"comp": TestFlow}): - assert await async_setup_component(hass, "comp", {}) - - await hass.async_block_till_done() - - assert len(async_setup_entry.mock_calls) == 1 - - entries = hass.config_entries.async_entries("comp") - assert len(entries) == 1 - assert entries[0].supported_subentries == () - assert entries[0].data == {"example": "data"} - assert len(entries[0].subentries) == 1 - subentry_id = list(entries[0].subentries)[0] - subentry = config_entries.ConfigSubentry( - data=subentrydata["data"], - subentry_id=subentry_id, - title=subentrydata["title"], - unique_id="test", - ) - assert entries[0].subentries == {subentry_id: subentry} - - -async def test_entry_subentry( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test that we can add a subentry to an entry.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - entry = MockConfigEntry(domain="test", data={"first": True}) - entry.add_to_manager(manager) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - """Test subentry flow.""" - - class SubentryFlowHandler(data_entry_flow.FlowHandler): - """Test subentry flow handler.""" - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries( - cls, config_entry: ConfigEntry - ) -> tuple[str, ...]: - return ("test",) - - with mock_config_flow("test", TestFlow): - flow = await manager.subentries.async_create_flow( - (entry.entry_id, "test"), context={"source": "test"}, data=None - ) - - flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry - - await manager.subentries.async_finish_flow( - flow, - { - "data": {"second": True}, - "title": "Mock title", - "type": data_entry_flow.FlowResultType.CREATE_ENTRY, - "unique_id": "test", - }, - ) - - assert entry.data == {"first": True} - assert 
entry.options == {} - subentry_id = list(entry.subentries)[0] - assert entry.subentries == { - subentry_id: config_entries.ConfigSubentry( - data={"second": True}, - subentry_id=subentry_id, - title="Mock title", - unique_id="test", - ) - } - assert entry.supported_subentries == ("test",) - - -async def test_entry_subentry_non_string( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test adding an invalid subentry to an entry.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - entry = MockConfigEntry(domain="test", data={"first": True}) - entry.add_to_manager(manager) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - """Test subentry flow.""" - - class SubentryFlowHandler(data_entry_flow.FlowHandler): - """Test subentry flow handler.""" - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries( - cls, config_entry: ConfigEntry - ) -> tuple[str, ...]: - return ("test",) - - with mock_config_flow("test", TestFlow): - flow = await manager.subentries.async_create_flow( - (entry.entry_id, "test"), context={"source": "test"}, data=None - ) - - flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry - - with pytest.raises(HomeAssistantError): - await manager.subentries.async_finish_flow( - flow, - { - "data": {"second": True}, - "title": "Mock title", - "type": data_entry_flow.FlowResultType.CREATE_ENTRY, - "unique_id": 123, - }, - ) - - -@pytest.mark.parametrize("context", [None, {}, {"bla": "bleh"}]) -async def test_entry_subentry_no_context( - hass: HomeAssistant, manager: config_entries.ConfigEntries, context: dict | None -) -> None: - """Test starting a subentry flow without "source" in context.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - entry = 
MockConfigEntry(domain="test", data={"first": True}) - entry.add_to_manager(manager) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - """Test subentry flow.""" - - class SubentryFlowHandler(data_entry_flow.FlowHandler): - """Test subentry flow handler.""" - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries( - cls, config_entry: ConfigEntry - ) -> tuple[str, ...]: - return ("test",) - - with mock_config_flow("test", TestFlow), pytest.raises(KeyError): - await manager.subentries.async_create_flow( - (entry.entry_id, "test"), context=context, data=None - ) - - -@pytest.mark.parametrize( - ("unique_id", "expected_result"), - [(None, does_not_raise()), ("test", pytest.raises(HomeAssistantError))], -) -async def test_entry_subentry_duplicate( - hass: HomeAssistant, - manager: config_entries.ConfigEntries, - unique_id: str | None, - expected_result: AbstractContextManager, -) -> None: - """Test adding a duplicated subentry to an entry.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - entry = MockConfigEntry( - domain="test", - data={"first": True}, - subentries_data=[ - config_entries.ConfigSubentryData( - data={}, - subentry_id="blabla", - title="Mock title", - unique_id=unique_id, - ) - ], - ) - entry.add_to_manager(manager) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - """Test subentry flow.""" - - class SubentryFlowHandler(data_entry_flow.FlowHandler): - """Test subentry flow handler.""" - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries( - cls, config_entry: ConfigEntry - ) -> tuple[str, ...]: - return ("test",) - - with mock_config_flow("test", TestFlow): - flow = await 
manager.subentries.async_create_flow( - (entry.entry_id, "test"), context={"source": "test"}, data=None - ) - - flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry - - with expected_result: - await manager.subentries.async_finish_flow( - flow, - { - "data": {"second": True}, - "title": "Mock title", - "type": data_entry_flow.FlowResultType.CREATE_ENTRY, - "unique_id": unique_id, - }, - ) - - -async def test_entry_subentry_abort( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test that we can abort subentry flow.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - entry = MockConfigEntry(domain="test", data={"first": True}) - entry.add_to_manager(manager) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - """Test subentry flow.""" - - class SubentryFlowHandler(data_entry_flow.FlowHandler): - """Test subentry flow handler.""" - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries( - cls, config_entry: ConfigEntry - ) -> tuple[str, ...]: - return ("test",) - - with mock_config_flow("test", TestFlow): - flow = await manager.subentries.async_create_flow( - (entry.entry_id, "test"), context={"source": "test"}, data=None - ) - - flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry - - assert await manager.subentries.async_finish_flow( - flow, {"type": data_entry_flow.FlowResultType.ABORT, "reason": "test"} - ) - - -async def test_entry_subentry_unknown_config_entry( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test attempting to start a subentry flow for an unknown config entry.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - - with pytest.raises(config_entries.UnknownEntry): - await 
manager.subentries.async_create_flow( - ("blah", "blah"), context={"source": "test"}, data=None - ) - - -async def test_entry_subentry_deleted_config_entry( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test attempting to finish a subentry flow for a deleted config entry.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - entry = MockConfigEntry(domain="test", data={"first": True}) - entry.add_to_manager(manager) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - @staticmethod - @callback - def async_get_subentry_flow(config_entry, subentry_type: str): - """Test subentry flow.""" - - class SubentryFlowHandler(data_entry_flow.FlowHandler): - """Test subentry flow handler.""" - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries( - cls, config_entry: ConfigEntry - ) -> tuple[str, ...]: - return ("test",) - - with mock_config_flow("test", TestFlow): - flow = await manager.subentries.async_create_flow( - (entry.entry_id, "test"), context={"source": "test"}, data=None - ) - - flow.handler = (entry.entry_id, "test") # Set to keep reference to config entry - - await hass.config_entries.async_remove(entry.entry_id) - - with pytest.raises(config_entries.UnknownEntry): - await manager.subentries.async_finish_flow( - flow, - { - "data": {"second": True}, - "title": "Mock title", - "type": data_entry_flow.FlowResultType.CREATE_ENTRY, - "unique_id": "test", - }, - ) - - -async def test_entry_subentry_unsupported( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test attempting to start a subentry flow for a config entry without support.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - entry = MockConfigEntry(domain="test", data={"first": True}) - entry.add_to_manager(manager) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - @staticmethod - 
@callback - def async_get_subentry_flow(config_entry, subentry_type: str): - """Test subentry flow.""" - - class SubentryFlowHandler(data_entry_flow.FlowHandler): - """Test subentry flow handler.""" - - return SubentryFlowHandler() - - @classmethod - @callback - def async_supported_subentries( - cls, config_entry: ConfigEntry - ) -> tuple[str, ...]: - return ("test",) - - with ( - mock_config_flow("test", TestFlow), - pytest.raises(data_entry_flow.UnknownHandler), - ): - await manager.subentries.async_create_flow( - ( - entry.entry_id, - "unknown", - ), - context={"source": "test"}, - data=None, - ) - - -async def test_entry_subentry_unsupported_subentry_type( - hass: HomeAssistant, manager: config_entries.ConfigEntries -) -> None: - """Test attempting to start a subentry flow for a config entry without support.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - entry = MockConfigEntry(domain="test", data={"first": True}) - entry.add_to_manager(manager) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - with ( - mock_config_flow("test", TestFlow), - pytest.raises(data_entry_flow.UnknownHandler), - ): - await manager.subentries.async_create_flow( - (entry.entry_id, "test"), context={"source": "test"}, data=None - ) - - async def test_entry_setup_succeed( hass: HomeAssistant, manager: config_entries.ConfigEntries ) -> None: @@ -4505,20 +3911,21 @@ async def test_updating_entry_with_and_without_changes( assert manager.async_update_entry(entry) is False - for change, expected_value in ( - ({"data": {"second": True, "third": 456}}, {"second": True, "third": 456}), - ({"data": {"second": True}}, {"second": True}), - ({"minor_version": 2}, 2), - ({"options": {"hello": True}}, {"hello": True}), - ({"pref_disable_new_entities": True}, True), - ({"pref_disable_polling": True}, True), - ({"title": "sometitle"}, "sometitle"), - ({"unique_id": "abcd1234"}, "abcd1234"), - ({"version": 2}, 2), + for change in ( + 
{"data": {"second": True, "third": 456}}, + {"data": {"second": True}}, + {"minor_version": 2}, + {"options": {"hello": True}}, + {"pref_disable_new_entities": True}, + {"pref_disable_polling": True}, + {"title": "sometitle"}, + {"unique_id": "abcd1234"}, + {"version": 2}, ): assert manager.async_update_entry(entry, **change) is True key = next(iter(change)) - assert getattr(entry, key) == expected_value + value = next(iter(change.values())) + assert getattr(entry, key) == value assert manager.async_update_entry(entry, **change) is False assert manager.async_entry_for_domain_unique_id("test", "abc123") is None @@ -6052,7 +5459,6 @@ async def test_unhashable_unique_id_fails( minor_version=1, options={}, source="test", - subentries_data=(), title="title", unique_id=unique_id, version=1, @@ -6088,7 +5494,6 @@ async def test_unhashable_unique_id_fails_on_update( minor_version=1, options={}, source="test", - subentries_data=(), title="title", unique_id="123", version=1, @@ -6119,7 +5524,6 @@ async def test_string_unique_id_no_warning( minor_version=1, options={}, source="test", - subentries_data=(), title="title", unique_id="123", version=1, @@ -6162,7 +5566,6 @@ async def test_hashable_unique_id( minor_version=1, options={}, source="test", - subentries_data=(), title="title", unique_id=unique_id, version=1, @@ -6197,7 +5600,6 @@ async def test_no_unique_id_no_warning( minor_version=1, options={}, source="test", - subentries_data=(), title="title", unique_id=None, version=1, @@ -7122,7 +6524,6 @@ async def test_migration_from_1_2( "pref_disable_new_entities": False, "pref_disable_polling": False, "source": "import", - "subentries": {}, "title": "Sun", "unique_id": None, "version": 1, From ca2c7280eb2649f1315e8cf104636f1eaf20f3a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Wed, 18 Dec 2024 12:59:56 +0000 Subject: [PATCH 414/677] Remove uneeded logger param from Idasen Desk Coordinator (#133485) --- homeassistant/components/idasen_desk/__init__.py 
| 2 +- homeassistant/components/idasen_desk/coordinator.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/idasen_desk/__init__.py b/homeassistant/components/idasen_desk/__init__.py index 1aacea91723..671319e46eb 100644 --- a/homeassistant/components/idasen_desk/__init__.py +++ b/homeassistant/components/idasen_desk/__init__.py @@ -27,7 +27,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: IdasenDeskConfigEntry) - """Set up IKEA Idasen from a config entry.""" address: str = entry.data[CONF_ADDRESS].upper() - coordinator = IdasenDeskCoordinator(hass, _LOGGER, entry.title, address) + coordinator = IdasenDeskCoordinator(hass, entry.title, address) entry.runtime_data = coordinator try: diff --git a/homeassistant/components/idasen_desk/coordinator.py b/homeassistant/components/idasen_desk/coordinator.py index a84027a26c0..d9e90cfe5ea 100644 --- a/homeassistant/components/idasen_desk/coordinator.py +++ b/homeassistant/components/idasen_desk/coordinator.py @@ -19,13 +19,12 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]): def __init__( self, hass: HomeAssistant, - logger: logging.Logger, name: str, address: str, ) -> None: """Init IdasenDeskCoordinator.""" - super().__init__(hass, logger, name=name) + super().__init__(hass, _LOGGER, name=name) self.address = address self._expected_connected = False From 4399d09820c20ea254100672ee5d4a3a40d276c8 Mon Sep 17 00:00:00 2001 From: Bas Brussee <68892092+basbruss@users.noreply.github.com> Date: Wed, 18 Dec 2024 14:02:08 +0100 Subject: [PATCH 415/677] Allow data description in sections (#128965) * Allow data description in sections * update format with ruff * Add data_description to kitchen_sink input section --------- Co-authored-by: Erik --- homeassistant/components/kitchen_sink/strings.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/homeassistant/components/kitchen_sink/strings.json b/homeassistant/components/kitchen_sink/strings.json index 
63e27e04637..b8dcfdd8e69 100644 --- a/homeassistant/components/kitchen_sink/strings.json +++ b/homeassistant/components/kitchen_sink/strings.json @@ -21,6 +21,9 @@ "bool": "Optional boolean", "int": "Numeric input" }, + "data_description": { + "int": "A longer description for the numeric input" + }, "description": "This section allows input of some extra data", "name": "Collapsible section" } From c06bc537248aed95037b85ba15be129ac567af9d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=2E=20Diego=20Rodr=C3=ADguez=20Royo?= Date: Wed, 18 Dec 2024 14:26:37 +0100 Subject: [PATCH 416/677] Deprecate Home Connect program switches (#131641) --- .../components/home_connect/strings.json | 4 + .../components/home_connect/switch.py | 57 ++++++++++++++ tests/components/home_connect/test_switch.py | 75 +++++++++++++++++++ 3 files changed, 136 insertions(+) diff --git a/homeassistant/components/home_connect/strings.json b/homeassistant/components/home_connect/strings.json index e70f2f28c65..f5c3cf69807 100644 --- a/homeassistant/components/home_connect/strings.json +++ b/homeassistant/components/home_connect/strings.json @@ -90,6 +90,10 @@ "deprecated_binary_common_door_sensor": { "title": "Deprecated binary door sensor detected in some automations or scripts", "description": "The binary door sensor `{entity}`, which is deprecated, is used in the following automations or scripts:\n{items}\n\nA sensor entity with additional possible states is available and should be used going forward; Please use it on the above automations or scripts to fix this issue." + }, + "deprecated_program_switch": { + "title": "Deprecated program switch detected in some automations or scripts", + "description": "Program switch are deprecated and {entity_id} is used in the following automations or scripts:\n{items}\n\nYou can use active program select entity to run the program without any additional option and get the current running program on the above automations or scripts to fix this issue." 
} }, "services": { diff --git a/homeassistant/components/home_connect/switch.py b/homeassistant/components/home_connect/switch.py index acb78e87db1..305077bfb86 100644 --- a/homeassistant/components/home_connect/switch.py +++ b/homeassistant/components/home_connect/switch.py @@ -6,10 +6,18 @@ from typing import Any from homeconnect.api import HomeConnectError +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) from . import HomeConnectConfigEntry, get_dict_from_home_connect_error from .const import ( @@ -201,6 +209,55 @@ class HomeConnectProgramSwitch(HomeConnectEntity, SwitchEntity): self._attr_has_entity_name = False self.program_name = program_name + async def async_added_to_hass(self) -> None: + """Call when entity is added to hass.""" + await super().async_added_to_hass() + automations = automations_with_entity(self.hass, self.entity_id) + scripts = scripts_with_entity(self.hass, self.entity_id) + items = automations + scripts + if not items: + return + + entity_reg: er.EntityRegistry = er.async_get(self.hass) + entity_automations = [ + automation_entity + for automation_id in automations + if (automation_entity := entity_reg.async_get(automation_id)) + ] + entity_scripts = [ + script_entity + for script_id in scripts + if (script_entity := entity_reg.async_get(script_id)) + ] + + items_list = [ + f"- [{item.original_name}](/config/automation/edit/{item.unique_id})" + for item in entity_automations + ] + [ + f"- 
[{item.original_name}](/config/script/edit/{item.unique_id})" + for item in entity_scripts + ] + + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_program_switch_{self.entity_id}", + breaks_in_ha_version="2025.6.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_program_switch", + translation_placeholders={ + "entity_id": self.entity_id, + "items": "\n".join(items_list), + }, + ) + + async def async_will_remove_from_hass(self) -> None: + """Call when entity will be removed from hass.""" + async_delete_issue( + self.hass, DOMAIN, f"deprecated_program_switch_{self.entity_id}" + ) + async def async_turn_on(self, **kwargs: Any) -> None: """Start the program.""" _LOGGER.debug("Tried to turn on program %s", self.program_name) diff --git a/tests/components/home_connect/test_switch.py b/tests/components/home_connect/test_switch.py index 3a89005dc59..a02cb553ece 100644 --- a/tests/components/home_connect/test_switch.py +++ b/tests/components/home_connect/test_switch.py @@ -6,6 +6,8 @@ from unittest.mock import MagicMock, Mock from homeconnect.api import HomeConnectAppliance, HomeConnectError import pytest +from homeassistant.components import automation, script +from homeassistant.components.automation import automations_with_entity from homeassistant.components.home_connect.const import ( ATTR_ALLOWED_VALUES, ATTR_CONSTRAINTS, @@ -16,8 +18,10 @@ from homeassistant.components.home_connect.const import ( BSH_POWER_ON, BSH_POWER_STANDBY, BSH_POWER_STATE, + DOMAIN, REFRIGERATION_SUPERMODEFREEZER, ) +from homeassistant.components.script import scripts_with_entity from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( @@ -30,6 +34,8 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +import homeassistant.helpers.issue_registry as ir +from 
homeassistant.setup import async_setup_component from .conftest import get_all_appliances @@ -506,3 +512,72 @@ async def test_power_switch_service_validation_errors( await hass.services.async_call( SWITCH_DOMAIN, service, {"entity_id": entity_id}, blocking=True ) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.usefixtures("bypass_throttle") +async def test_create_issue( + hass: HomeAssistant, + appliance: Mock, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, + issue_registry: ir.IssueRegistry, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + entity_id = "switch.washer_program_mix" + appliance.status.update(SETTINGS_STATUS) + appliance.get_programs_available.return_value = [PROGRAM] + get_appliances.return_value = [appliance] + issue_id = f"deprecated_program_switch_{entity_id}" + + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + script.DOMAIN, + { + script.DOMAIN: { + "test": { + "sequence": [ + { + "action": "switch.turn_on", + "entity_id": entity_id, + }, + ], + } + } + }, + ) + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert len(issue_registry.issues) == 1 + assert issue_registry.async_get_issue(DOMAIN, issue_id) + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + # Assert the issue is no longer 
present + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 From a46a0ad2b4692b3a8dd6f0df07df721bc518f20d Mon Sep 17 00:00:00 2001 From: Andre Lengwenus Date: Wed, 18 Dec 2024 14:35:02 +0100 Subject: [PATCH 417/677] Add device_id parameter to LCN actions (service calls) (#129590) --- homeassistant/components/lcn/__init__.py | 2 + homeassistant/components/lcn/const.py | 1 + homeassistant/components/lcn/helpers.py | 11 +- homeassistant/components/lcn/services.py | 53 +++++- homeassistant/components/lcn/services.yaml | 118 +++++++++++-- homeassistant/components/lcn/strings.json | 89 ++++++++++ tests/components/lcn/test_services.py | 193 +++++++++++++++------ 7 files changed, 398 insertions(+), 69 deletions(-) diff --git a/homeassistant/components/lcn/__init__.py b/homeassistant/components/lcn/__init__.py index eb26ef48e4e..6dc6fb1ecc4 100644 --- a/homeassistant/components/lcn/__init__.py +++ b/homeassistant/components/lcn/__init__.py @@ -31,6 +31,7 @@ from .const import ( CONF_SK_NUM_TRIES, CONF_TRANSITION, CONNECTION, + DEVICE_CONNECTIONS, DOMAIN, PLATFORMS, ) @@ -102,6 +103,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b _LOGGER.debug('LCN connected to "%s"', config_entry.title) hass.data[DOMAIN][config_entry.entry_id] = { CONNECTION: lcn_connection, + DEVICE_CONNECTIONS: {}, ADD_ENTITIES_CALLBACKS: {}, } # Update config_entry with LCN device serials diff --git a/homeassistant/components/lcn/const.py b/homeassistant/components/lcn/const.py index 97aeeecd8b5..cee9da9be43 100644 --- a/homeassistant/components/lcn/const.py +++ b/homeassistant/components/lcn/const.py @@ -20,6 +20,7 @@ DEFAULT_NAME = "pchk" ADD_ENTITIES_CALLBACKS = "add_entities_callbacks" CONNECTION = "connection" +DEVICE_CONNECTIONS = "device_connections" CONF_HARDWARE_SERIAL = "hardware_serial" CONF_SOFTWARE_SERIAL = "software_serial" CONF_HARDWARE_TYPE = "hardware_type" diff --git 
a/homeassistant/components/lcn/helpers.py b/homeassistant/components/lcn/helpers.py index 6a9c63ea212..348305c775e 100644 --- a/homeassistant/components/lcn/helpers.py +++ b/homeassistant/components/lcn/helpers.py @@ -38,6 +38,7 @@ from .const import ( CONF_SCENES, CONF_SOFTWARE_SERIAL, CONNECTION, + DEVICE_CONNECTIONS, DOMAIN, LED_PORTS, LOGICOP_PORTS, @@ -237,7 +238,7 @@ def register_lcn_address_devices( identifiers = {(DOMAIN, generate_unique_id(config_entry.entry_id, address))} if device_config[CONF_ADDRESS][2]: # is group - device_model = f"LCN group (g{address[0]:03d}{address[1]:03d})" + device_model = "LCN group" sw_version = None else: # is module hardware_type = device_config[CONF_HARDWARE_TYPE] @@ -245,10 +246,10 @@ def register_lcn_address_devices( hardware_name = pypck.lcn_defs.HARDWARE_DESCRIPTIONS[hardware_type] else: hardware_name = pypck.lcn_defs.HARDWARE_DESCRIPTIONS[-1] - device_model = f"{hardware_name} (m{address[0]:03d}{address[1]:03d})" + device_model = f"{hardware_name}" sw_version = f"{device_config[CONF_SOFTWARE_SERIAL]:06X}" - device_registry.async_get_or_create( + device_entry = device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, identifiers=identifiers, via_device=host_identifiers, @@ -258,6 +259,10 @@ def register_lcn_address_devices( model=device_model, ) + hass.data[DOMAIN][config_entry.entry_id][DEVICE_CONNECTIONS][ + device_entry.id + ] = get_device_connection(hass, address, config_entry) + async def async_update_device_config( device_connection: DeviceConnectionType, device_config: ConfigType diff --git a/homeassistant/components/lcn/services.py b/homeassistant/components/lcn/services.py index 92f5863c47e..a6c42de0487 100644 --- a/homeassistant/components/lcn/services.py +++ b/homeassistant/components/lcn/services.py @@ -8,12 +8,21 @@ import voluptuous as vol from homeassistant.const import ( CONF_ADDRESS, CONF_BRIGHTNESS, + CONF_DEVICE_ID, CONF_HOST, CONF_STATE, CONF_UNIT_OF_MEASUREMENT, ) -from 
homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, +) +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import device_registry as dr import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from .const import ( CONF_KEYS, @@ -30,6 +39,7 @@ from .const import ( CONF_TRANSITION, CONF_VALUE, CONF_VARIABLE, + DEVICE_CONNECTIONS, DOMAIN, LED_PORTS, LED_STATUS, @@ -53,7 +63,13 @@ from .helpers import ( class LcnServiceCall: """Parent class for all LCN service calls.""" - schema = vol.Schema({vol.Required(CONF_ADDRESS): is_address}) + schema = vol.Schema( + { + vol.Optional(CONF_DEVICE_ID): cv.string, + vol.Optional(CONF_ADDRESS): is_address, + } + ) + supports_response = SupportsResponse.NONE def __init__(self, hass: HomeAssistant) -> None: """Initialize service call.""" @@ -61,8 +77,37 @@ class LcnServiceCall: def get_device_connection(self, service: ServiceCall) -> DeviceConnectionType: """Get address connection object.""" - address, host_name = service.data[CONF_ADDRESS] + if CONF_DEVICE_ID not in service.data and CONF_ADDRESS not in service.data: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="no_device_identifier", + ) + if CONF_DEVICE_ID in service.data: + device_id = service.data[CONF_DEVICE_ID] + device_registry = dr.async_get(self.hass) + if not (device := device_registry.async_get(device_id)): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_device_id", + translation_placeholders={"device_id": device_id}, + ) + + return self.hass.data[DOMAIN][device.primary_config_entry][ + DEVICE_CONNECTIONS + ][device_id] + + async_create_issue( + self.hass, + DOMAIN, + "deprecated_address_parameter", + breaks_in_ha_version="2025.6.0", + is_fixable=False, + 
severity=IssueSeverity.WARNING, + translation_key="deprecated_address_parameter", + ) + + address, host_name = service.data[CONF_ADDRESS] for config_entry in self.hass.config_entries.async_entries(DOMAIN): if config_entry.data[CONF_HOST] == host_name: device_connection = get_device_connection( @@ -73,7 +118,7 @@ class LcnServiceCall: return device_connection raise ValueError("Invalid host name.") - async def async_call_service(self, service: ServiceCall) -> None: + async def async_call_service(self, service: ServiceCall) -> ServiceResponse: """Execute service call.""" raise NotImplementedError diff --git a/homeassistant/components/lcn/services.yaml b/homeassistant/components/lcn/services.yaml index d62a1e72d45..f58e79b9f40 100644 --- a/homeassistant/components/lcn/services.yaml +++ b/homeassistant/components/lcn/services.yaml @@ -2,8 +2,76 @@ output_abs: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: &device_selector + device: + filter: + - integration: lcn + model: LCN group + - integration: lcn + model: UnknownModuleType + - integration: lcn + model: LCN-SW1.0 + - integration: lcn + model: LCN-SW1.1 + - integration: lcn + model: LCN-UP1.0 + - integration: lcn + model: LCN-UP2 + - integration: lcn + model: LCN-SW2 + - integration: lcn + model: LCN-UP-Profi1-Plus + - integration: lcn + model: LCN-DI12 + - integration: lcn + model: LCN-HU + - integration: lcn + model: LCN-SH + - integration: lcn + model: LCN-UP2 + - integration: lcn + model: LCN-UPP + - integration: lcn + model: LCN-SK + - integration: lcn + model: LCN-LD + - integration: lcn + model: LCN-SH-Plus + - integration: lcn + model: LCN-UPS + - integration: lcn + model: LCN_UPS24V + - integration: lcn + model: LCN-GTM + - integration: lcn + model: LCN-SHS + - integration: lcn + model: LCN-ESD + - integration: lcn + model: LCN-EB2 + - integration: lcn + model: LCN-MRS + - integration: lcn + model: LCN-EB11 + - integration: lcn + model: LCN-UMR + - integration: lcn + model: 
LCN-UPU + - integration: lcn + model: LCN-UMR24V + - integration: lcn + model: LCN-SHD + - integration: lcn + model: LCN-SHU + - integration: lcn + model: LCN-SR6 + - integration: lcn + model: LCN-UMF + - integration: lcn + model: LCN-WBH address: - required: true example: "myhome.s0.m7" selector: text: @@ -34,8 +102,10 @@ output_abs: output_rel: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - required: true example: "myhome.s0.m7" selector: text: @@ -58,8 +128,10 @@ output_rel: output_toggle: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - required: true example: "myhome.s0.m7" selector: text: @@ -83,8 +155,10 @@ output_toggle: relays: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - required: true example: "myhome.s0.m7" selector: text: @@ -96,8 +170,10 @@ relays: led: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - required: true example: "myhome.s0.m7" selector: text: @@ -130,8 +206,10 @@ led: var_abs: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - required: true example: "myhome.s0.m7" selector: text: @@ -197,8 +275,10 @@ var_abs: var_reset: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - required: true example: "myhome.s0.m7" selector: text: @@ -230,8 +310,10 @@ var_reset: var_rel: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - required: true example: "myhome.s0.m7" selector: text: @@ -321,8 +403,10 @@ var_rel: lock_regulator: fields: + device_id: + example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2" + selector: *device_selector address: - required: true example: "myhome.s0.m7" selector: text: @@ -355,8 +439,10 @@ lock_regulator: send_keys: fields: + device_id: 
+      example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2"
+      selector: *device_selector
     address:
-      required: true
       example: "myhome.s0.m7"
       selector:
         text:
@@ -402,8 +488,10 @@ lock_keys:
   fields:
+    device_id:
+      example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2"
+      selector: *device_selector
     address:
-      required: true
       example: "myhome.s0.m7"
       selector:
         text:
@@ -445,8 +533,10 @@ dyn_text:
   fields:
+    device_id:
+      example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2"
+      selector: *device_selector
     address:
-      required: true
       example: "myhome.s0.m7"
       selector:
         text:
@@ -464,8 +554,10 @@ pck:
   fields:
+    device_id:
+      example: "91aa039a2fb6e0b9f9ec7eb219a6b7d2"
+      selector: *device_selector
     address:
-      required: true
       example: "myhome.s0.m7"
       selector:
         text:
diff --git a/homeassistant/components/lcn/strings.json b/homeassistant/components/lcn/strings.json
index 088a3654500..988c2a637fb 100644
--- a/homeassistant/components/lcn/strings.json
+++ b/homeassistant/components/lcn/strings.json
@@ -70,6 +70,10 @@
     "deprecated_keylock_sensor": {
       "title": "Deprecated LCN key lock binary sensor",
       "description": "Your LCN key lock binary sensor entity `{entity}` is beeing used in automations or scripts. A key lock switch entity is available and should be used going forward.\n\nPlease adjust your automations or scripts to fix this issue."
+    },
+    "deprecated_address_parameter": {
+      "title": "Deprecated 'address' parameter",
+      "description": "The 'address' parameter in the LCN service calls is deprecated. The 'device_id' parameter should be used going forward.\n\nPlease adjust your automations or scripts to fix this issue."
+    }
   }
  },
  "services": {
@@ -77,6 +81,10 @@
       "name": "Output absolute brightness",
       "description": "Sets absolute brightness of output port in percent.",
       "fields": {
+        "device_id": {
+          "name": "[%key:common::config_flow::data::device%]",
+          "description": "The device_id of the LCN module or group."
+        },
         "address": {
           "name": "Address",
           "description": "Module address."
@@ -99,6 +107,10 @@ "name": "Output relative brightness", "description": "Sets relative brightness of output port in percent.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -117,6 +129,10 @@ "name": "Toggle output", "description": "Toggles output port.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -135,6 +151,10 @@ "name": "Relays", "description": "Sets the relays status.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -149,6 +169,10 @@ "name": "LED", "description": "Sets the led state.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -167,6 +191,10 @@ "name": "Set absolute variable", "description": "Sets absolute value of a variable or setpoint.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": 
"[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -189,6 +217,10 @@ "name": "Reset variable", "description": "Resets value of variable or setpoint.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -203,6 +235,10 @@ "name": "Shift variable", "description": "Shift value of a variable, setpoint or threshold.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -229,6 +265,10 @@ "name": "Lock regulator", "description": "Locks a regulator setpoint.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -247,6 +287,10 @@ "name": "Send keys", "description": "Sends keys (which executes bound commands).", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -273,6 +317,10 @@ "name": "Lock keys", "description": "Locks keys.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { 
"name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -299,6 +347,10 @@ "name": "Dynamic text", "description": "Sends dynamic text to LCN-GTxD displays.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -317,6 +369,10 @@ "name": "PCK", "description": "Sends arbitrary PCK command.", "fields": { + "device_id": { + "name": "[%key:common::config_flow::data::device%]", + "description": "[%key:component::lcn::services::output_abs::fields::device_id::description%]" + }, "address": { "name": "Address", "description": "[%key:component::lcn::services::output_abs::fields::address::description%]" @@ -326,6 +382,39 @@ "description": "PCK command (without address header)." } } + }, + "address_to_device_id": { + "name": "Address to device id", + "description": "Convert LCN address to device id.", + "fields": { + "id": { + "name": "Module or group id", + "description": "Target module or group id." + }, + "segment_id": { + "name": "Segment id", + "description": "Target segment id." + }, + "type": { + "name": "Type", + "description": "Target type." + }, + "host": { + "name": "Host name", + "description": "Host name as given in the integration panel." + } + } + } + }, + "exceptions": { + "no_device_identifier": { + "message": "No device identifier provided. Please provide the device id." + }, + "invalid_address": { + "message": "LCN device for given address has not been configured." + }, + "invalid_device_id": { + "message": "LCN device for given device id has not been configured." 
} } } diff --git a/tests/components/lcn/test_services.py b/tests/components/lcn/test_services.py index a4ea559cd72..cd97e3484e3 100644 --- a/tests/components/lcn/test_services.py +++ b/tests/components/lcn/test_services.py @@ -26,22 +26,37 @@ from homeassistant.components.lcn.services import LcnService from homeassistant.const import ( CONF_ADDRESS, CONF_BRIGHTNESS, + CONF_DEVICE_ID, CONF_STATE, CONF_UNIT_OF_MEASUREMENT, ) from homeassistant.core import HomeAssistant +import homeassistant.helpers.issue_registry as ir from homeassistant.setup import async_setup_component from .conftest import ( MockConfigEntry, MockModuleConnection, - MockPchkConnectionManager, + get_device, init_integration, ) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_output_abs(hass: HomeAssistant, entry: MockConfigEntry) -> None: +def device_config( + hass: HomeAssistant, entry: MockConfigEntry, config_type: str +) -> dict[str, str]: + """Return test device config depending on type.""" + if config_type == CONF_ADDRESS: + return {CONF_ADDRESS: "pchk.s0.m7"} + return {CONF_DEVICE_ID: get_device(hass, entry, (0, 7, False)).id} + + +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_output_abs( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test output_abs service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -51,7 +66,7 @@ async def test_service_output_abs(hass: HomeAssistant, entry: MockConfigEntry) - DOMAIN, LcnService.OUTPUT_ABS, { - CONF_ADDRESS: "pchk.s0.m7", + **device_config(hass, entry, config_type), CONF_OUTPUT: "output1", CONF_BRIGHTNESS: 100, CONF_TRANSITION: 5, @@ -62,8 +77,12 @@ async def test_service_output_abs(hass: HomeAssistant, entry: MockConfigEntry) - dim_output.assert_awaited_with(0, 100, 9) -@patch("homeassistant.components.lcn.PchkConnectionManager", 
MockPchkConnectionManager) -async def test_service_output_rel(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_output_rel( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test output_rel service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -73,7 +92,7 @@ async def test_service_output_rel(hass: HomeAssistant, entry: MockConfigEntry) - DOMAIN, LcnService.OUTPUT_REL, { - CONF_ADDRESS: "pchk.s0.m7", + **device_config(hass, entry, config_type), CONF_OUTPUT: "output1", CONF_BRIGHTNESS: 25, }, @@ -83,9 +102,11 @@ async def test_service_output_rel(hass: HomeAssistant, entry: MockConfigEntry) - rel_output.assert_awaited_with(0, 25) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) async def test_service_output_toggle( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, ) -> None: """Test output_toggle service.""" await async_setup_component(hass, "persistent_notification", {}) @@ -96,7 +117,7 @@ async def test_service_output_toggle( DOMAIN, LcnService.OUTPUT_TOGGLE, { - CONF_ADDRESS: "pchk.s0.m7", + **device_config(hass, entry, config_type), CONF_OUTPUT: "output1", CONF_TRANSITION: 5, }, @@ -106,8 +127,12 @@ async def test_service_output_toggle( toggle_output.assert_awaited_with(0, 9) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_relays(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_relays( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test relays service.""" await async_setup_component(hass, 
"persistent_notification", {}) await init_integration(hass, entry) @@ -116,7 +141,7 @@ async def test_service_relays(hass: HomeAssistant, entry: MockConfigEntry) -> No await hass.services.async_call( DOMAIN, LcnService.RELAYS, - {CONF_ADDRESS: "pchk.s0.m7", CONF_STATE: "0011TT--"}, + {**device_config(hass, entry, config_type), CONF_STATE: "0011TT--"}, blocking=True, ) @@ -126,8 +151,12 @@ async def test_service_relays(hass: HomeAssistant, entry: MockConfigEntry) -> No control_relays.assert_awaited_with(relay_states) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_led(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_led( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test led service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -136,7 +165,11 @@ async def test_service_led(hass: HomeAssistant, entry: MockConfigEntry) -> None: await hass.services.async_call( DOMAIN, LcnService.LED, - {CONF_ADDRESS: "pchk.s0.m7", CONF_LED: "led6", CONF_STATE: "blink"}, + { + **device_config(hass, entry, config_type), + CONF_LED: "led6", + CONF_STATE: "blink", + }, blocking=True, ) @@ -146,8 +179,12 @@ async def test_service_led(hass: HomeAssistant, entry: MockConfigEntry) -> None: control_led.assert_awaited_with(led, led_state) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_var_abs(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_var_abs( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test var_abs service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -157,7 
+194,7 @@ async def test_service_var_abs(hass: HomeAssistant, entry: MockConfigEntry) -> N DOMAIN, LcnService.VAR_ABS, { - CONF_ADDRESS: "pchk.s0.m7", + **device_config(hass, entry, config_type), CONF_VARIABLE: "var1", CONF_VALUE: 75, CONF_UNIT_OF_MEASUREMENT: "%", @@ -170,8 +207,12 @@ async def test_service_var_abs(hass: HomeAssistant, entry: MockConfigEntry) -> N ) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_var_rel(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_var_rel( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test var_rel service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -181,7 +222,7 @@ async def test_service_var_rel(hass: HomeAssistant, entry: MockConfigEntry) -> N DOMAIN, LcnService.VAR_REL, { - CONF_ADDRESS: "pchk.s0.m7", + **device_config(hass, entry, config_type), CONF_VARIABLE: "var1", CONF_VALUE: 10, CONF_UNIT_OF_MEASUREMENT: "%", @@ -198,8 +239,12 @@ async def test_service_var_rel(hass: HomeAssistant, entry: MockConfigEntry) -> N ) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_var_reset(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_var_reset( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test var_reset service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -208,16 +253,18 @@ async def test_service_var_reset(hass: HomeAssistant, entry: MockConfigEntry) -> await hass.services.async_call( DOMAIN, LcnService.VAR_RESET, - {CONF_ADDRESS: "pchk.s0.m7", CONF_VARIABLE: "var1"}, + {**device_config(hass, entry, 
config_type), CONF_VARIABLE: "var1"}, blocking=True, ) var_reset.assert_awaited_with(pypck.lcn_defs.Var["VAR1"]) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) async def test_service_lock_regulator( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, ) -> None: """Test lock_regulator service.""" await async_setup_component(hass, "persistent_notification", {}) @@ -228,7 +275,7 @@ async def test_service_lock_regulator( DOMAIN, LcnService.LOCK_REGULATOR, { - CONF_ADDRESS: "pchk.s0.m7", + **device_config(hass, entry, config_type), CONF_SETPOINT: "r1varsetpoint", CONF_STATE: True, }, @@ -238,8 +285,12 @@ async def test_service_lock_regulator( lock_regulator.assert_awaited_with(0, True) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_send_keys(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_send_keys( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test send_keys service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -248,7 +299,11 @@ async def test_service_send_keys(hass: HomeAssistant, entry: MockConfigEntry) -> await hass.services.async_call( DOMAIN, LcnService.SEND_KEYS, - {CONF_ADDRESS: "pchk.s0.m7", CONF_KEYS: "a1a5d8", CONF_STATE: "hit"}, + { + **device_config(hass, entry, config_type), + CONF_KEYS: "a1a5d8", + CONF_STATE: "hit", + }, blocking=True, ) @@ -260,9 +315,11 @@ async def test_service_send_keys(hass: HomeAssistant, entry: MockConfigEntry) -> send_keys.assert_awaited_with(keys, pypck.lcn_defs.SendKeyCommand["HIT"]) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) 
+@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) async def test_service_send_keys_hit_deferred( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, ) -> None: """Test send_keys (hit_deferred) service.""" await async_setup_component(hass, "persistent_notification", {}) @@ -281,7 +338,7 @@ async def test_service_send_keys_hit_deferred( DOMAIN, LcnService.SEND_KEYS, { - CONF_ADDRESS: "pchk.s0.m7", + **device_config(hass, entry, config_type), CONF_KEYS: "a1a5d8", CONF_TIME: 5, CONF_TIME_UNIT: "s", @@ -304,7 +361,7 @@ async def test_service_send_keys_hit_deferred( DOMAIN, LcnService.SEND_KEYS, { - CONF_ADDRESS: "pchk.s0.m7", + **device_config(hass, entry, config_type), CONF_KEYS: "a1a5d8", CONF_STATE: "make", CONF_TIME: 5, @@ -314,8 +371,12 @@ async def test_service_send_keys_hit_deferred( ) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_lock_keys(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_lock_keys( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test lock_keys service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -324,7 +385,11 @@ async def test_service_lock_keys(hass: HomeAssistant, entry: MockConfigEntry) -> await hass.services.async_call( DOMAIN, LcnService.LOCK_KEYS, - {CONF_ADDRESS: "pchk.s0.m7", CONF_TABLE: "a", CONF_STATE: "0011TT--"}, + { + **device_config(hass, entry, config_type), + CONF_TABLE: "a", + CONF_STATE: "0011TT--", + }, blocking=True, ) @@ -334,9 +399,11 @@ async def test_service_lock_keys(hass: HomeAssistant, entry: MockConfigEntry) -> lock_keys.assert_awaited_with(0, lock_states) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) 
+@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) async def test_service_lock_keys_tab_a_temporary( - hass: HomeAssistant, entry: MockConfigEntry + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, ) -> None: """Test lock_keys (tab_a_temporary) service.""" await async_setup_component(hass, "persistent_notification", {}) @@ -350,7 +417,7 @@ async def test_service_lock_keys_tab_a_temporary( DOMAIN, LcnService.LOCK_KEYS, { - CONF_ADDRESS: "pchk.s0.m7", + **device_config(hass, entry, config_type), CONF_STATE: "0011TT--", CONF_TIME: 10, CONF_TIME_UNIT: "s", @@ -376,7 +443,7 @@ async def test_service_lock_keys_tab_a_temporary( DOMAIN, LcnService.LOCK_KEYS, { - CONF_ADDRESS: "pchk.s0.m7", + **device_config(hass, entry, config_type), CONF_TABLE: "b", CONF_STATE: "0011TT--", CONF_TIME: 10, @@ -386,8 +453,12 @@ async def test_service_lock_keys_tab_a_temporary( ) -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_dyn_text(hass: HomeAssistant, entry: MockConfigEntry) -> None: +@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_dyn_text( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test dyn_text service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -396,15 +467,23 @@ async def test_service_dyn_text(hass: HomeAssistant, entry: MockConfigEntry) -> await hass.services.async_call( DOMAIN, LcnService.DYN_TEXT, - {CONF_ADDRESS: "pchk.s0.m7", CONF_ROW: 1, CONF_TEXT: "text in row 1"}, + { + **device_config(hass, entry, config_type), + CONF_ROW: 1, + CONF_TEXT: "text in row 1", + }, blocking=True, ) dyn_text.assert_awaited_with(0, "text in row 1") -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_pck(hass: HomeAssistant, entry: MockConfigEntry) -> None: 
+@pytest.mark.parametrize("config_type", [CONF_ADDRESS, CONF_DEVICE_ID]) +async def test_service_pck( + hass: HomeAssistant, + entry: MockConfigEntry, + config_type: str, +) -> None: """Test pck service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -413,14 +492,13 @@ async def test_service_pck(hass: HomeAssistant, entry: MockConfigEntry) -> None: await hass.services.async_call( DOMAIN, LcnService.PCK, - {CONF_ADDRESS: "pchk.s0.m7", CONF_PCK: "PIN4"}, + {**device_config(hass, entry, config_type), CONF_PCK: "PIN4"}, blocking=True, ) pck.assert_awaited_with("PIN4") -@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_service_called_with_invalid_host_id( hass: HomeAssistant, entry: MockConfigEntry ) -> None: @@ -437,3 +515,20 @@ async def test_service_called_with_invalid_host_id( ) pck.assert_not_awaited() + + +async def test_service_with_deprecated_address_parameter( + hass: HomeAssistant, entry: MockConfigEntry, issue_registry: ir.IssueRegistry +) -> None: + """Test service puts issue in registry if called with address parameter.""" + await async_setup_component(hass, "persistent_notification", {}) + await init_integration(hass, entry) + + await hass.services.async_call( + DOMAIN, + LcnService.PCK, + {CONF_ADDRESS: "pchk.s0.m7", CONF_PCK: "PIN4"}, + blocking=True, + ) + + assert issue_registry.async_get_issue(DOMAIN, "deprecated_address_parameter") From 971618399723472f17541130f260b5148f77f9af Mon Sep 17 00:00:00 2001 From: Guido Schmitz Date: Wed, 18 Dec 2024 14:38:29 +0100 Subject: [PATCH 418/677] Add entity translations to devolo Home Control (#132927) --- .../devolo_home_control/binary_sensor.py | 13 +--- .../components/devolo_home_control/sensor.py | 7 +- .../devolo_home_control/strings.json | 15 ++++ tests/components/devolo_home_control/mocks.py | 38 +++++++++- .../snapshots/test_binary_sensor.ambr | 4 +- .../snapshots/test_sensor.ambr | 74 
++++++++++++++++--- .../devolo_home_control/test_sensor.py | 56 ++++++++------ 7 files changed, 157 insertions(+), 50 deletions(-) diff --git a/homeassistant/components/devolo_home_control/binary_sensor.py b/homeassistant/components/devolo_home_control/binary_sensor.py index 449b1c7659f..d24033a80b9 100644 --- a/homeassistant/components/devolo_home_control/binary_sensor.py +++ b/homeassistant/components/devolo_home_control/binary_sensor.py @@ -81,14 +81,8 @@ class DevoloBinaryDeviceEntity(DevoloDeviceEntity, BinarySensorEntity): or self._binary_sensor_property.sensor_type ) - if device_instance.binary_sensor_property[element_uid].sub_type != "": - self._attr_name = device_instance.binary_sensor_property[ - element_uid - ].sub_type.capitalize() - else: - self._attr_name = device_instance.binary_sensor_property[ - element_uid - ].sensor_type.capitalize() + if device_instance.binary_sensor_property[element_uid].sub_type == "overload": + self._attr_translation_key = "overload" self._value = self._binary_sensor_property.state @@ -129,7 +123,8 @@ class DevoloRemoteControl(DevoloDeviceEntity, BinarySensorEntity): self._key = key self._attr_is_on = False - self._attr_name = f"Button {key}" + self._attr_translation_key = "button" + self._attr_translation_placeholders = {"key": str(key)} def _sync(self, message: tuple) -> None: """Update the binary sensor state.""" diff --git a/homeassistant/components/devolo_home_control/sensor.py b/homeassistant/components/devolo_home_control/sensor.py index 61a63419732..8d0a7f0313c 100644 --- a/homeassistant/components/devolo_home_control/sensor.py +++ b/homeassistant/components/devolo_home_control/sensor.py @@ -116,9 +116,11 @@ class DevoloGenericMultiLevelDeviceEntity(DevoloMultiLevelDeviceEntity): self._multi_level_sensor_property.sensor_type ) self._attr_native_unit_of_measurement = self._multi_level_sensor_property.unit - self._attr_name = self._multi_level_sensor_property.sensor_type.capitalize() self._value = 
self._multi_level_sensor_property.value + if self._multi_level_sensor_property.sensor_type == "light": + self._attr_translation_key = "brightness" + if element_uid.startswith("devolo.VoltageMultiLevelSensor:"): self._attr_entity_registry_enabled_default = False @@ -128,7 +130,6 @@ class DevoloBatteryEntity(DevoloMultiLevelDeviceEntity): _attr_entity_category = EntityCategory.DIAGNOSTIC _attr_native_unit_of_measurement = PERCENTAGE - _attr_name = "Battery level" _attr_device_class = SensorDeviceClass.BATTERY _attr_state_class = SensorStateClass.MEASUREMENT @@ -175,8 +176,6 @@ class DevoloConsumptionEntity(DevoloMultiLevelDeviceEntity): device_instance.consumption_property[element_uid], consumption ) - self._attr_name = f"{consumption.capitalize()} consumption" - @property def unique_id(self) -> str: """Return the unique ID of the entity. diff --git a/homeassistant/components/devolo_home_control/strings.json b/homeassistant/components/devolo_home_control/strings.json index 1eaf64564c2..be853e2d89d 100644 --- a/homeassistant/components/devolo_home_control/strings.json +++ b/homeassistant/components/devolo_home_control/strings.json @@ -30,5 +30,20 @@ } } } + }, + "entity": { + "binary_sensor": { + "button": { + "name": "Button {key}" + }, + "overload": { + "name": "Overload" + } + }, + "sensor": { + "brightness": { + "name": "Brightness" + } + } } } diff --git a/tests/components/devolo_home_control/mocks.py b/tests/components/devolo_home_control/mocks.py index 33c0a230e90..d611c73cf2c 100644 --- a/tests/components/devolo_home_control/mocks.py +++ b/tests/components/devolo_home_control/mocks.py @@ -70,6 +70,18 @@ class MultiLevelSensorPropertyMock(MultiLevelSensorProperty): self._logger = MagicMock() +class BrightnessSensorPropertyMock(MultiLevelSensorProperty): + """devolo Home Control brightness sensor mock.""" + + def __init__(self, **kwargs: Any) -> None: # pylint: disable=super-init-not-called + """Initialize the mock.""" + self.element_uid = "Test" + 
self.sensor_type = "light" + self._unit = "%" + self._value = 20 + self._logger = MagicMock() + + class MultiLevelSwitchPropertyMock(MultiLevelSwitchProperty): """devolo Home Control multi level switch mock.""" @@ -138,7 +150,18 @@ class BinarySensorMockOverload(DeviceMock): """Initialize the mock.""" super().__init__() self.binary_sensor_property = {"Overload": BinarySensorPropertyMock()} - self.binary_sensor_property["Overload"].sensor_type = "overload" + self.binary_sensor_property["Overload"].sub_type = "overload" + + +class BrightnessSensorMock(DeviceMock): + """devolo Home Control brightness sensor device mock.""" + + def __init__(self) -> None: + """Initialize the mock.""" + super().__init__() + self.multi_level_sensor_property = { + "devolo.MultiLevelSensor:Test": BrightnessSensorPropertyMock() + } class ClimateMock(DeviceMock): @@ -275,6 +298,19 @@ class HomeControlMockBinarySensor(HomeControlMock): self.publisher.unregister = MagicMock() +class HomeControlMockBrightness(HomeControlMock): + """devolo Home Control gateway mock with brightness devices.""" + + def __init__(self, **kwargs: Any) -> None: + """Initialize the mock.""" + super().__init__() + self.devices = { + "Test": BrightnessSensorMock(), + } + self.publisher = Publisher(self.devices.keys()) + self.publisher.unregister = MagicMock() + + class HomeControlMockClimate(HomeControlMock): """devolo Home Control gateway mock with climate devices.""" diff --git a/tests/components/devolo_home_control/snapshots/test_binary_sensor.ambr b/tests/components/devolo_home_control/snapshots/test_binary_sensor.ambr index 0980a550c7b..c5daed73b33 100644 --- a/tests/components/devolo_home_control/snapshots/test_binary_sensor.ambr +++ b/tests/components/devolo_home_control/snapshots/test_binary_sensor.ambr @@ -88,7 +88,7 @@ 'platform': 'devolo_home_control', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'overload', 'unique_id': 'Overload', 'unit_of_measurement': 
None, }) @@ -134,7 +134,7 @@ 'platform': 'devolo_home_control', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'button', 'unique_id': 'Test_1', 'unit_of_measurement': None, }) diff --git a/tests/components/devolo_home_control/snapshots/test_sensor.ambr b/tests/components/devolo_home_control/snapshots/test_sensor.ambr index 7f67c70f6ac..3c23385594a 100644 --- a/tests/components/devolo_home_control/snapshots/test_sensor.ambr +++ b/tests/components/devolo_home_control/snapshots/test_sensor.ambr @@ -3,12 +3,12 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'battery', - 'friendly_name': 'Test Battery level', + 'friendly_name': 'Test Battery', 'state_class': , 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.test_battery_level', + 'entity_id': 'sensor.test_battery', 'last_changed': , 'last_reported': , 'last_updated': , @@ -29,7 +29,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.test_battery_level', + 'entity_id': 'sensor.test_battery', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -41,7 +41,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Battery level', + 'original_name': 'Battery', 'platform': 'devolo_home_control', 'previous_unique_id': None, 'supported_features': 0, @@ -50,16 +50,66 @@ 'unit_of_measurement': '%', }) # --- +# name: test_brightness_sensor + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Brightness', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_brightness', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_brightness_sensor.1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_brightness', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Brightness', + 'platform': 'devolo_home_control', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'brightness', + 'unique_id': 'devolo.MultiLevelSensor:Test', + 'unit_of_measurement': '%', + }) +# --- # name: test_consumption_sensor StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Test Current consumption', + 'friendly_name': 'Test Power', 'state_class': , 'unit_of_measurement': 'W', }), 'context': , - 'entity_id': 'sensor.test_current_consumption', + 'entity_id': 'sensor.test_power', 'last_changed': , 'last_reported': , 'last_updated': , @@ -80,7 +130,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.test_current_consumption', + 'entity_id': 'sensor.test_power', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -92,7 +142,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Current consumption', + 'original_name': 'Power', 'platform': 'devolo_home_control', 'previous_unique_id': None, 'supported_features': 0, @@ -105,12 +155,12 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'Test Total consumption', + 'friendly_name': 'Test Energy', 'state_class': , 'unit_of_measurement': 'kWh', }), 'context': , - 'entity_id': 'sensor.test_total_consumption', + 'entity_id': 'sensor.test_energy', 'last_changed': , 'last_reported': , 'last_updated': , @@ -131,7 +181,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.test_total_consumption', + 'entity_id': 'sensor.test_energy', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -143,7 +193,7 @@ 
}), 'original_device_class': , 'original_icon': None, - 'original_name': 'Total consumption', + 'original_name': 'Energy', 'platform': 'devolo_home_control', 'previous_unique_id': None, 'supported_features': 0, diff --git a/tests/components/devolo_home_control/test_sensor.py b/tests/components/devolo_home_control/test_sensor.py index 08b53dae865..ba4c493c366 100644 --- a/tests/components/devolo_home_control/test_sensor.py +++ b/tests/components/devolo_home_control/test_sensor.py @@ -10,7 +10,30 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from . import configure_integration -from .mocks import HomeControlMock, HomeControlMockConsumption, HomeControlMockSensor +from .mocks import ( + HomeControlMock, + HomeControlMockBrightness, + HomeControlMockConsumption, + HomeControlMockSensor, +) + + +async def test_brightness_sensor( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: + """Test setup of a brightness sensor device.""" + entry = configure_integration(hass) + test_gateway = HomeControlMockBrightness() + with patch( + "homeassistant.components.devolo_home_control.HomeControl", + side_effect=[test_gateway, HomeControlMock()], + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get(f"{SENSOR_DOMAIN}.test_brightness") + assert state == snapshot + assert entity_registry.async_get(f"{SENSOR_DOMAIN}.test_brightness") == snapshot async def test_temperature_sensor( @@ -45,14 +68,14 @@ async def test_battery_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{SENSOR_DOMAIN}.test_battery_level") + state = hass.states.get(f"{SENSOR_DOMAIN}.test_battery") assert state == snapshot - assert entity_registry.async_get(f"{SENSOR_DOMAIN}.test_battery_level") == snapshot + assert 
entity_registry.async_get(f"{SENSOR_DOMAIN}.test_battery") == snapshot # Emulate websocket message: value changed test_gateway.publisher.dispatch("Test", ("Test", 10, "battery_level")) await hass.async_block_till_done() - assert hass.states.get(f"{SENSOR_DOMAIN}.test_battery_level").state == "10" + assert hass.states.get(f"{SENSOR_DOMAIN}.test_battery").state == "10" async def test_consumption_sensor( @@ -68,37 +91,26 @@ async def test_consumption_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{SENSOR_DOMAIN}.test_current_consumption") + state = hass.states.get(f"{SENSOR_DOMAIN}.test_power") assert state == snapshot - assert ( - entity_registry.async_get(f"{SENSOR_DOMAIN}.test_current_consumption") - == snapshot - ) + assert entity_registry.async_get(f"{SENSOR_DOMAIN}.test_power") == snapshot - state = hass.states.get(f"{SENSOR_DOMAIN}.test_total_consumption") + state = hass.states.get(f"{SENSOR_DOMAIN}.test_energy") assert state == snapshot - assert ( - entity_registry.async_get(f"{SENSOR_DOMAIN}.test_total_consumption") == snapshot - ) + assert entity_registry.async_get(f"{SENSOR_DOMAIN}.test_energy") == snapshot # Emulate websocket message: value changed test_gateway.devices["Test"].consumption_property["devolo.Meter:Test"].total = 50.0 test_gateway.publisher.dispatch("Test", ("devolo.Meter:Test", 50.0)) await hass.async_block_till_done() - assert hass.states.get(f"{SENSOR_DOMAIN}.test_total_consumption").state == "50.0" + assert hass.states.get(f"{SENSOR_DOMAIN}.test_energy").state == "50.0" # Emulate websocket message: device went offline test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert ( - hass.states.get(f"{SENSOR_DOMAIN}.test_current_consumption").state - == STATE_UNAVAILABLE - ) - assert ( - hass.states.get(f"{SENSOR_DOMAIN}.test_total_consumption").state - == STATE_UNAVAILABLE 
- ) + assert hass.states.get(f"{SENSOR_DOMAIN}.test_power").state == STATE_UNAVAILABLE + assert hass.states.get(f"{SENSOR_DOMAIN}.test_energy").state == STATE_UNAVAILABLE async def test_voltage_sensor(hass: HomeAssistant) -> None: From 2d6d313e5cae60510c3e294110905b9d80ea5e5e Mon Sep 17 00:00:00 2001 From: Markus Jacobsen Date: Wed, 18 Dec 2024 14:50:12 +0100 Subject: [PATCH 419/677] Complete adding custom integration action sections support to hassfest (#132443) --- script/hassfest/services.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/script/hassfest/services.py b/script/hassfest/services.py index 8c9ab5c0c0b..3a0ebed76fe 100644 --- a/script/hassfest/services.py +++ b/script/hassfest/services.py @@ -77,6 +77,8 @@ CUSTOM_INTEGRATION_FIELD_SCHEMA = CORE_INTEGRATION_FIELD_SCHEMA.extend( CUSTOM_INTEGRATION_SECTION_SCHEMA = vol.Schema( { + vol.Optional("description"): str, + vol.Optional("name"): str, vol.Optional("collapsed"): bool, vol.Required("fields"): vol.Schema({str: CUSTOM_INTEGRATION_FIELD_SCHEMA}), } From 943b1d9f08ec451545a483e98cccec815ca72b59 Mon Sep 17 00:00:00 2001 From: adam-the-hero <132444842+adam-the-hero@users.noreply.github.com> Date: Wed, 18 Dec 2024 14:52:25 +0100 Subject: [PATCH 420/677] Add sensors platform to Watergate integration (#133015) --- .../components/watergate/__init__.py | 32 +- .../components/watergate/coordinator.py | 33 +- homeassistant/components/watergate/entity.py | 10 +- .../components/watergate/quality_scale.yaml | 1 + homeassistant/components/watergate/sensor.py | 214 ++++++++ .../components/watergate/strings.json | 33 ++ homeassistant/components/watergate/valve.py | 13 +- tests/components/watergate/conftest.py | 8 + tests/components/watergate/const.py | 19 +- .../watergate/snapshots/test_sensor.ambr | 506 ++++++++++++++++++ tests/components/watergate/test_sensor.py | 150 ++++++ 11 files changed, 1002 insertions(+), 17 deletions(-) create mode 100644 homeassistant/components/watergate/sensor.py create mode 100644 
tests/components/watergate/snapshots/test_sensor.ambr create mode 100644 tests/components/watergate/test_sensor.py diff --git a/homeassistant/components/watergate/__init__.py b/homeassistant/components/watergate/__init__.py index 1cf38876556..fa761110339 100644 --- a/homeassistant/components/watergate/__init__.py +++ b/homeassistant/components/watergate/__init__.py @@ -25,8 +25,13 @@ from .coordinator import WatergateDataCoordinator _LOGGER = logging.getLogger(__name__) +WEBHOOK_TELEMETRY_TYPE = "telemetry" +WEBHOOK_VALVE_TYPE = "valve" +WEBHOOK_WIFI_CHANGED_TYPE = "wifi-changed" +WEBHOOK_POWER_SUPPLY_CHANGED_TYPE = "power-supply-changed" PLATFORMS: list[Platform] = [ + Platform.SENSOR, Platform.VALVE, ] @@ -82,7 +87,6 @@ def get_webhook_handler( async def async_webhook_handler( hass: HomeAssistant, webhook_id: str, request: Request ) -> Response | None: - # Handle http post calls to the path. if not request.body_exists: return HomeAssistantView.json( result="No Body", status_code=HTTPStatus.BAD_REQUEST @@ -96,9 +100,29 @@ def get_webhook_handler( body_type = body.get("type") - coordinator_data = coordinator.data - if body_type == Platform.VALVE and coordinator_data: - coordinator_data.valve_state = data.state + if not (coordinator_data := coordinator.data): + pass + elif body_type == WEBHOOK_VALVE_TYPE: + coordinator_data.state.valve_state = data.state + elif body_type == WEBHOOK_TELEMETRY_TYPE: + errors = data.errors or {} + coordinator_data.telemetry.flow = ( + data.flow if "flow" not in errors else None + ) + coordinator_data.telemetry.pressure = ( + data.pressure if "pressure" not in errors else None + ) + coordinator_data.telemetry.water_temperature = ( + data.temperature if "temperature" not in errors else None + ) + elif body_type == WEBHOOK_WIFI_CHANGED_TYPE: + coordinator_data.networking.ip = data.ip + coordinator_data.networking.gateway = data.gateway + coordinator_data.networking.subnet = data.subnet + coordinator_data.networking.ssid = data.ssid + 
coordinator_data.networking.rssi = data.rssi + elif body_type == WEBHOOK_POWER_SUPPLY_CHANGED_TYPE: + coordinator_data.state.power_supply = data.supply coordinator.async_set_updated_data(coordinator_data) diff --git a/homeassistant/components/watergate/coordinator.py b/homeassistant/components/watergate/coordinator.py index c0b87feed30..1d83b7a3ccb 100644 --- a/homeassistant/components/watergate/coordinator.py +++ b/homeassistant/components/watergate/coordinator.py @@ -1,10 +1,11 @@ """Coordinator for Watergate API.""" +from dataclasses import dataclass from datetime import timedelta import logging from watergate_local_api import WatergateApiException, WatergateLocalApiClient -from watergate_local_api.models import DeviceState +from watergate_local_api.models import DeviceState, NetworkingData, TelemetryData from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -14,7 +15,16 @@ from .const import DOMAIN _LOGGER = logging.getLogger(__name__) -class WatergateDataCoordinator(DataUpdateCoordinator[DeviceState]): +@dataclass +class WatergateAgregatedRequests: + """Class to hold aggregated requests.""" + + state: DeviceState + telemetry: TelemetryData + networking: NetworkingData + + +class WatergateDataCoordinator(DataUpdateCoordinator[WatergateAgregatedRequests]): """Class to manage fetching watergate data.""" def __init__(self, hass: HomeAssistant, api: WatergateLocalApiClient) -> None: @@ -27,9 +37,22 @@ class WatergateDataCoordinator(DataUpdateCoordinator[DeviceState]): ) self.api = api - async def _async_update_data(self) -> DeviceState: + async def _async_update_data(self) -> WatergateAgregatedRequests: try: state = await self.api.async_get_device_state() + telemetry = await self.api.async_get_telemetry_data() + networking = await self.api.async_get_networking() except WatergateApiException as exc: - raise UpdateFailed from exc - return state + raise UpdateFailed(f"Sonic device is 
unavailable: {exc}") from exc + return WatergateAgregatedRequests(state, telemetry, networking) + + def async_set_updated_data(self, data: WatergateAgregatedRequests) -> None: + """Manually update data, notify listeners and DO NOT reset refresh interval.""" + + self.data = data + self.logger.debug( + "Manually updated %s data", + self.name, + ) + + self.async_update_listeners() diff --git a/homeassistant/components/watergate/entity.py b/homeassistant/components/watergate/entity.py index 977a7fbedb4..8f43643029f 100644 --- a/homeassistant/components/watergate/entity.py +++ b/homeassistant/components/watergate/entity.py @@ -20,11 +20,13 @@ class WatergateEntity(CoordinatorEntity[WatergateDataCoordinator]): """Initialize the entity.""" super().__init__(coordinator) self._api_client = coordinator.api - self._attr_unique_id = f"{coordinator.data.serial_number}.{entity_name}" + self._attr_unique_id = f"{coordinator.data.state.serial_number}.{entity_name}" self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, coordinator.data.serial_number)}, + identifiers={(DOMAIN, coordinator.data.state.serial_number)}, name="Sonic", - serial_number=coordinator.data.serial_number, + serial_number=coordinator.data.state.serial_number, manufacturer=MANUFACTURER, - sw_version=coordinator.data.firmware_version if coordinator.data else None, + sw_version=( + coordinator.data.state.firmware_version if coordinator.data else None + ), ) diff --git a/homeassistant/components/watergate/quality_scale.yaml b/homeassistant/components/watergate/quality_scale.yaml index c6027f6a548..b116eff970e 100644 --- a/homeassistant/components/watergate/quality_scale.yaml +++ b/homeassistant/components/watergate/quality_scale.yaml @@ -27,6 +27,7 @@ rules: test-before-configure: done test-before-setup: done unique-config-entry: done + # Silver config-entry-unloading: done log-when-unavailable: todo diff --git a/homeassistant/components/watergate/sensor.py b/homeassistant/components/watergate/sensor.py new 
file mode 100644 index 00000000000..82ac7cfea92 --- /dev/null +++ b/homeassistant/components/watergate/sensor.py @@ -0,0 +1,214 @@ +"""Support for Watergate sensors.""" + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime, timedelta +from enum import StrEnum +import logging + +from homeassistant.components.sensor import ( + HomeAssistant, + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import ( + SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + EntityCategory, + UnitOfPressure, + UnitOfTemperature, + UnitOfTime, + UnitOfVolume, + UnitOfVolumeFlowRate, +) +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType +from homeassistant.util import dt as dt_util + +from . import WatergateConfigEntry +from .coordinator import WatergateAgregatedRequests, WatergateDataCoordinator +from .entity import WatergateEntity + +_LOGGER = logging.getLogger(__name__) + +PARALLEL_UPDATES = 0 + + +class PowerSupplyMode(StrEnum): + """LED bar mode.""" + + BATTERY = "battery" + EXTERNAL = "external" + BATTERY_EXTERNAL = "battery_external" + + +@dataclass(kw_only=True, frozen=True) +class WatergateSensorEntityDescription(SensorEntityDescription): + """Description for Watergate sensor entities.""" + + value_fn: Callable[ + [WatergateAgregatedRequests], + StateType | datetime | PowerSupplyMode, + ] + + +DESCRIPTIONS: list[WatergateSensorEntityDescription] = [ + WatergateSensorEntityDescription( + value_fn=lambda data: ( + data.state.water_meter.duration + if data.state and data.state.water_meter + else None + ), + translation_key="water_meter_volume", + key="water_meter_volume", + native_unit_of_measurement=UnitOfVolume.LITERS, + device_class=SensorDeviceClass.WATER, + state_class=SensorStateClass.TOTAL_INCREASING, + ), + WatergateSensorEntityDescription( + value_fn=lambda data: ( + data.state.water_meter.duration + if 
data.state and data.state.water_meter + else None + ), + translation_key="water_meter_duration", + key="water_meter_duration", + native_unit_of_measurement=UnitOfTime.MINUTES, + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL_INCREASING, + ), + WatergateSensorEntityDescription( + value_fn=lambda data: data.networking.rssi if data.networking else None, + key="rssi", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + device_class=SensorDeviceClass.SIGNAL_STRENGTH, + state_class=SensorStateClass.MEASUREMENT, + ), + WatergateSensorEntityDescription( + value_fn=lambda data: ( + dt_util.as_utc( + dt_util.now() - timedelta(microseconds=data.networking.wifi_uptime) + ) + if data.networking + else None + ), + translation_key="wifi_up_since", + key="wifi_up_since", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + device_class=SensorDeviceClass.TIMESTAMP, + ), + WatergateSensorEntityDescription( + value_fn=lambda data: ( + dt_util.as_utc( + dt_util.now() - timedelta(microseconds=data.networking.mqtt_uptime) + ) + if data.networking + else None + ), + translation_key="mqtt_up_since", + key="mqtt_up_since", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + device_class=SensorDeviceClass.TIMESTAMP, + ), + WatergateSensorEntityDescription( + value_fn=lambda data: ( + data.telemetry.water_temperature if data.telemetry else None + ), + translation_key="water_temperature", + key="water_temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + ), + WatergateSensorEntityDescription( + value_fn=lambda data: data.telemetry.pressure if data.telemetry else None, + translation_key="water_pressure", + key="water_pressure", + native_unit_of_measurement=UnitOfPressure.MBAR, + 
device_class=SensorDeviceClass.PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + ), + WatergateSensorEntityDescription( + value_fn=lambda data: ( + data.telemetry.flow / 1000 + if data.telemetry and data.telemetry.flow is not None + else None + ), + key="water_flow_rate", + native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE, + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, + state_class=SensorStateClass.MEASUREMENT, + ), + WatergateSensorEntityDescription( + value_fn=lambda data: ( + dt_util.as_utc(dt_util.now() - timedelta(seconds=data.state.uptime)) + if data.state + else None + ), + translation_key="up_since", + key="up_since", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + device_class=SensorDeviceClass.TIMESTAMP, + ), + WatergateSensorEntityDescription( + value_fn=lambda data: ( + PowerSupplyMode(data.state.power_supply.replace("+", "_")) + if data.state + else None + ), + translation_key="power_supply_mode", + key="power_supply_mode", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + device_class=SensorDeviceClass.ENUM, + options=[member.value for member in PowerSupplyMode], + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: WatergateConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up all entries for Watergate Platform.""" + + coordinator = config_entry.runtime_data + + async_add_entities( + SonicSensor(coordinator, description) for description in DESCRIPTIONS + ) + + +class SonicSensor(WatergateEntity, SensorEntity): + """Define a Sonic Sensor entity.""" + + entity_description: WatergateSensorEntityDescription + + def __init__( + self, + coordinator: WatergateDataCoordinator, + entity_description: WatergateSensorEntityDescription, + ) -> None: + """Initialize the sensor.""" + super().__init__(coordinator, entity_description.key) + self.entity_description = entity_description + + @property + def 
available(self) -> bool: + """Return True if entity is available.""" + return ( + super().available + and self.entity_description.value_fn(self.coordinator.data) is not None + ) + + @property + def native_value(self) -> str | int | float | datetime | PowerSupplyMode | None: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/watergate/strings.json b/homeassistant/components/watergate/strings.json index 2a75c4d103d..c312525e420 100644 --- a/homeassistant/components/watergate/strings.json +++ b/homeassistant/components/watergate/strings.json @@ -17,5 +17,38 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } + }, + "entity": { + "sensor": { + "water_meter_volume": { + "name": "Water meter volume" + }, + "water_meter_duration": { + "name": "Water meter duration" + }, + "wifi_up_since": { + "name": "Wi-Fi up since" + }, + "mqtt_up_since": { + "name": "MQTT up since" + }, + "water_temperature": { + "name": "Water temperature" + }, + "water_pressure": { + "name": "Water pressure" + }, + "up_since": { + "name": "Up since" + }, + "power_supply_mode": { + "name": "Power supply mode", + "state": { + "battery": "Battery", + "external": "Mains", + "battery_external": "Battery and mains" + } + } + } } } diff --git a/homeassistant/components/watergate/valve.py b/homeassistant/components/watergate/valve.py index aecaf3fbca9..556b53e1d3c 100644 --- a/homeassistant/components/watergate/valve.py +++ b/homeassistant/components/watergate/valve.py @@ -43,7 +43,9 @@ class SonicValve(WatergateEntity, ValveEntity): ) -> None: """Initialize the sensor.""" super().__init__(coordinator, ENTITY_NAME) - self._valve_state = coordinator.data.valve_state if coordinator.data else None + self._valve_state = ( + coordinator.data.state.valve_state if coordinator.data.state else None + ) @property def is_closed(self) -> bool: @@ -65,7 +67,9 @@ class 
SonicValve(WatergateEntity, ValveEntity): """Handle data update.""" self._attr_available = self.coordinator.data is not None self._valve_state = ( - self.coordinator.data.valve_state if self.coordinator.data else None + self.coordinator.data.state.valve_state + if self.coordinator.data.state + else None ) self.async_write_ha_state() @@ -80,3 +84,8 @@ class SonicValve(WatergateEntity, ValveEntity): await self._api_client.async_set_valve_state(ValveState.CLOSED) self._valve_state = ValveState.CLOSING self.async_write_ha_state() + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return super().available and self.coordinator.data.state is not None diff --git a/tests/components/watergate/conftest.py b/tests/components/watergate/conftest.py index d29b90431a4..6d40a4b7152 100644 --- a/tests/components/watergate/conftest.py +++ b/tests/components/watergate/conftest.py @@ -9,7 +9,9 @@ from homeassistant.const import CONF_IP_ADDRESS from .const import ( DEFAULT_DEVICE_STATE, + DEFAULT_NETWORKING_STATE, DEFAULT_SERIAL_NUMBER, + DEFAULT_TELEMETRY_STATE, MOCK_CONFIG, MOCK_WEBHOOK_ID, ) @@ -35,6 +37,12 @@ def mock_watergate_client() -> Generator[AsyncMock]: mock_client_instance.async_get_device_state = AsyncMock( return_value=DEFAULT_DEVICE_STATE ) + mock_client_instance.async_get_networking = AsyncMock( + return_value=DEFAULT_NETWORKING_STATE + ) + mock_client_instance.async_get_telemetry_data = AsyncMock( + return_value=DEFAULT_TELEMETRY_STATE + ) yield mock_client_instance diff --git a/tests/components/watergate/const.py b/tests/components/watergate/const.py index 4297b3321ad..0f7cc12c14b 100644 --- a/tests/components/watergate/const.py +++ b/tests/components/watergate/const.py @@ -1,6 +1,7 @@ """Constants for the Watergate tests.""" -from watergate_local_api.models import DeviceState +from watergate_local_api.models import DeviceState, NetworkingData, TelemetryData +from watergate_local_api.models.water_meter import WaterMeter from 
homeassistant.const import CONF_IP_ADDRESS, CONF_NAME, CONF_WEBHOOK_ID @@ -22,6 +23,20 @@ DEFAULT_DEVICE_STATE = DeviceState( "battery", "1.0.0", 100, - {"volume": 1.2, "duration": 100}, + WaterMeter(1.2, 100), DEFAULT_SERIAL_NUMBER, ) + +DEFAULT_NETWORKING_STATE = NetworkingData( + True, + True, + "192.168.1.127", + "192.168.1.1", + "255.255.255.0", + "Sonic", + -50, + 2137, + 1910, +) + +DEFAULT_TELEMETRY_STATE = TelemetryData(0.0, 100, 28.32, None, []) diff --git a/tests/components/watergate/snapshots/test_sensor.ambr b/tests/components/watergate/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..a8969798105 --- /dev/null +++ b/tests/components/watergate/snapshots/test_sensor.ambr @@ -0,0 +1,506 @@ +# serializer version: 1 +# name: test_sensor[sensor.sonic_mqtt_up_since-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.sonic_mqtt_up_since', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'MQTT up since', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'mqtt_up_since', + 'unique_id': 'a63182948ce2896a.mqtt_up_since', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.sonic_mqtt_up_since-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Sonic MQTT up since', + }), + 'context': , + 'entity_id': 'sensor.sonic_mqtt_up_since', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-09T11:59:59+00:00', + }) +# --- +# name: test_sensor[sensor.sonic_power_supply_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), 
+ 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'battery', + 'external', + 'battery_external', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.sonic_power_supply_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power supply mode', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_supply_mode', + 'unique_id': 'a63182948ce2896a.power_supply_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.sonic_power_supply_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Sonic Power supply mode', + 'options': list([ + 'battery', + 'external', + 'battery_external', + ]), + }), + 'context': , + 'entity_id': 'sensor.sonic_power_supply_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'battery', + }) +# --- +# name: test_sensor[sensor.sonic_signal_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.sonic_signal_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Signal strength', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a63182948ce2896a.rssi', + 'unit_of_measurement': 'dBm', + }) +# --- +# name: 
test_sensor[sensor.sonic_signal_strength-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'signal_strength', + 'friendly_name': 'Sonic Signal strength', + 'state_class': , + 'unit_of_measurement': 'dBm', + }), + 'context': , + 'entity_id': 'sensor.sonic_signal_strength', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-50', + }) +# --- +# name: test_sensor[sensor.sonic_up_since-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.sonic_up_since', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Up since', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'up_since', + 'unique_id': 'a63182948ce2896a.up_since', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.sonic_up_since-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Sonic Up since', + }), + 'context': , + 'entity_id': 'sensor.sonic_up_since', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-09T11:58:20+00:00', + }) +# --- +# name: test_sensor[sensor.sonic_volume_flow_rate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sonic_volume_flow_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Volume flow rate', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a63182948ce2896a.water_flow_rate', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sonic_volume_flow_rate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volume_flow_rate', + 'friendly_name': 'Sonic Volume flow rate', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sonic_volume_flow_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensor[sensor.sonic_water_meter_duration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sonic_water_meter_duration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water meter duration', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_meter_duration', + 'unique_id': 'a63182948ce2896a.water_meter_duration', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sonic_water_meter_duration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Sonic Water meter duration', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sonic_water_meter_duration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensor[sensor.sonic_water_meter_volume-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sonic_water_meter_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water meter volume', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_meter_volume', + 'unique_id': 'a63182948ce2896a.water_meter_volume', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sonic_water_meter_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'water', + 'friendly_name': 'Sonic Water meter volume', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sonic_water_meter_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensor[sensor.sonic_water_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sonic_water_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water pressure', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_pressure', + 'unique_id': 'a63182948ce2896a.water_pressure', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[sensor.sonic_water_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Sonic Water pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sonic_water_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensor[sensor.sonic_water_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sonic_water_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water temperature', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_temperature', + 'unique_id': 'a63182948ce2896a.water_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sonic_water_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Sonic Water temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sonic_water_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '28.32', + }) +# --- +# name: test_sensor[sensor.sonic_wi_fi_up_since-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.sonic_wi_fi_up_since', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , 
+ 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wi-Fi up since', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wifi_up_since', + 'unique_id': 'a63182948ce2896a.wifi_up_since', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.sonic_wi_fi_up_since-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Sonic Wi-Fi up since', + }), + 'context': , + 'entity_id': 'sensor.sonic_wi_fi_up_since', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-09T11:59:59+00:00', + }) +# --- diff --git a/tests/components/watergate/test_sensor.py b/tests/components/watergate/test_sensor.py new file mode 100644 index 00000000000..58632c7548b --- /dev/null +++ b/tests/components/watergate/test_sensor.py @@ -0,0 +1,150 @@ +"""Tests for the Watergate valve platform.""" + +from collections.abc import Generator + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import EntityCategory, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import init_integration +from .const import DEFAULT_NETWORKING_STATE, DEFAULT_TELEMETRY_STATE, MOCK_WEBHOOK_ID + +from tests.common import AsyncMock, MockConfigEntry, patch, snapshot_platform +from tests.typing import ClientSessionGenerator + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, +) -> None: + """Test states of the sensor.""" + freezer.move_to("2021-01-09 12:00:00+00:00") + with patch("homeassistant.components.watergate.PLATFORMS", [Platform.SENSOR]): + await init_integration(hass, mock_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_entry.entry_id) + + +async def test_diagnostics_are_disabled_by_default( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], +) -> None: + """Test if all diagnostic entities are disabled by default.""" + with patch("homeassistant.components.watergate.PLATFORMS", [Platform.SENSOR]): + await init_integration(hass, mock_entry) + + entries = [ + entry + for entry in entity_registry.entities.get_entries_for_config_entry_id( + mock_entry.entry_id + ) + if entry.entity_category == EntityCategory.DIAGNOSTIC + ] + + assert len(entries) == 5 + for entry in entries: + assert entry.disabled + + +async def test_telemetry_webhook( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], +) -> None: + """Test if water flow webhook is handled correctly.""" + await init_integration(hass, mock_entry) + + def assert_state(entity_id: str, expected_state: str): + state = hass.states.get(entity_id) + assert state.state == str(expected_state) + + assert_state("sensor.sonic_volume_flow_rate", 
DEFAULT_TELEMETRY_STATE.flow) + assert_state("sensor.sonic_water_pressure", DEFAULT_TELEMETRY_STATE.pressure) + assert_state( + "sensor.sonic_water_temperature", DEFAULT_TELEMETRY_STATE.water_temperature + ) + + telemetry_change_data = { + "type": "telemetry", + "data": {"flow": 2137, "pressure": 1910, "temperature": 20}, + } + client = await hass_client_no_auth() + await client.post(f"/api/webhook/{MOCK_WEBHOOK_ID}", json=telemetry_change_data) + + await hass.async_block_till_done() + + assert_state("sensor.sonic_volume_flow_rate", "2.137") + assert_state("sensor.sonic_water_pressure", "1910") + assert_state("sensor.sonic_water_temperature", "20") + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_wifi_webhook( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], +) -> None: + """Test if water flow webhook is handled correctly.""" + await init_integration(hass, mock_entry) + + def assert_state(entity_id: str, expected_state: str): + state = hass.states.get(entity_id) + assert state.state == str(expected_state) + + assert_state("sensor.sonic_signal_strength", DEFAULT_NETWORKING_STATE.rssi) + + wifi_change_data = { + "type": "wifi-changed", + "data": { + "ip": "192.168.2.137", + "gateway": "192.168.2.1", + "ssid": "Sonic 2", + "rssi": -70, + "subnet": "255.255.255.0", + }, + } + client = await hass_client_no_auth() + await client.post(f"/api/webhook/{MOCK_WEBHOOK_ID}", json=wifi_change_data) + + await hass.async_block_till_done() + + assert_state("sensor.sonic_signal_strength", "-70") + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_power_supply_webhook( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], +) -> None: + """Test if water flow webhook is handled correctly.""" + await init_integration(hass, 
mock_entry) + entity_id = "sensor.sonic_power_supply_mode" + registered_entity = hass.states.get(entity_id) + assert registered_entity + assert registered_entity.state == "battery" + + power_supply_change_data = { + "type": "power-supply-changed", + "data": {"supply": "external"}, + } + client = await hass_client_no_auth() + await client.post(f"/api/webhook/{MOCK_WEBHOOK_ID}", json=power_supply_change_data) + + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == "external" From 3132700492bea7ab9ba6c42ba0689ef18a6a55e6 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Wed, 18 Dec 2024 14:02:44 +0000 Subject: [PATCH 421/677] Add ability to translate ENUM sensor states in Unifi integration (#131921) --- homeassistant/components/unifi/const.py | 25 ++- homeassistant/components/unifi/sensor.py | 4 +- homeassistant/components/unifi/strings.json | 20 +++ .../unifi/snapshots/test_sensor.ambr | 156 +++++++++--------- tests/components/unifi/test_sensor.py | 5 +- 5 files changed, 113 insertions(+), 97 deletions(-) diff --git a/homeassistant/components/unifi/const.py b/homeassistant/components/unifi/const.py index 2b16895a9a8..bbd03b070a4 100644 --- a/homeassistant/components/unifi/const.py +++ b/homeassistant/components/unifi/const.py @@ -50,17 +50,16 @@ DPI_SWITCH = "dpi" OUTLET_SWITCH = "outlet" DEVICE_STATES = { - DeviceState.DISCONNECTED: "Disconnected", - DeviceState.CONNECTED: "Connected", - DeviceState.PENDING: "Pending", - DeviceState.FIRMWARE_MISMATCH: "Firmware Mismatch", - DeviceState.UPGRADING: "Upgrading", - DeviceState.PROVISIONING: "Provisioning", - DeviceState.HEARTBEAT_MISSED: "Heartbeat Missed", - DeviceState.ADOPTING: "Adopting", - DeviceState.DELETING: "Deleting", - DeviceState.INFORM_ERROR: "Inform Error", - DeviceState.ADOPTION_FALIED: "Adoption Failed", - DeviceState.ISOLATED: "Isolated", - DeviceState.UNKNOWN: "Unknown", + DeviceState.DISCONNECTED: "disconnected", + DeviceState.CONNECTED: "connected", + 
DeviceState.PENDING: "pending", + DeviceState.FIRMWARE_MISMATCH: "firmware_mismatch", + DeviceState.UPGRADING: "upgrading", + DeviceState.PROVISIONING: "provisioning", + DeviceState.HEARTBEAT_MISSED: "heartbeat_missed", + DeviceState.ADOPTING: "adopting", + DeviceState.DELETING: "deleting", + DeviceState.INFORM_ERROR: "inform_error", + DeviceState.ADOPTION_FALIED: "adoption_failed", + DeviceState.ISOLATED: "isolated", } diff --git a/homeassistant/components/unifi/sensor.py b/homeassistant/components/unifi/sensor.py index 74d49db6e4e..194a8575174 100644 --- a/homeassistant/components/unifi/sensor.py +++ b/homeassistant/components/unifi/sensor.py @@ -205,9 +205,9 @@ def async_client_is_connected_fn(hub: UnifiHub, obj_id: str) -> bool: @callback -def async_device_state_value_fn(hub: UnifiHub, device: Device) -> str: +def async_device_state_value_fn(hub: UnifiHub, device: Device) -> str | None: """Retrieve the state of the device.""" - return DEVICE_STATES[device.state] + return DEVICE_STATES.get(device.state) @callback diff --git a/homeassistant/components/unifi/strings.json b/homeassistant/components/unifi/strings.json index 1c7317c4267..f9315318d1e 100644 --- a/homeassistant/components/unifi/strings.json +++ b/homeassistant/components/unifi/strings.json @@ -33,6 +33,26 @@ "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, + "entity": { + "sensor": { + "device_state": { + "state": { + "disconnected": "[%key:common::state::disconnected%]", + "connected": "[%key:common::state::connected%]", + "pending": "Pending", + "firmware_mismatch": "Firmware mismatch", + "upgrading": "Upgrading", + "provisioning": "Provisioning", + "heartbeat_missed": "Heartbeat missed", + "adopting": "Adopting", + "deleting": "Deleting", + "inform_error": "Inform error", + "adoption_failed": "Adoption failed", + "isolated": "Isolated" + } + } + } + }, "options": { "abort": { "integration_not_setup": "UniFi integration is not set up" diff --git 
a/tests/components/unifi/snapshots/test_sensor.ambr b/tests/components/unifi/snapshots/test_sensor.ambr index fc86a57a294..e14658b2b96 100644 --- a/tests/components/unifi/snapshots/test_sensor.ambr +++ b/tests/components/unifi/snapshots/test_sensor.ambr @@ -55,19 +55,18 @@ 'area_id': None, 'capabilities': dict({ 'options': list([ - 'Disconnected', - 'Connected', - 'Pending', - 'Firmware Mismatch', - 'Upgrading', - 'Provisioning', - 'Heartbeat Missed', - 'Adopting', - 'Deleting', - 'Inform Error', - 'Adoption Failed', - 'Isolated', - 'Unknown', + 'disconnected', + 'connected', + 'pending', + 'firmware_mismatch', + 'upgrading', + 'provisioning', + 'heartbeat_missed', + 'adopting', + 'deleting', + 'inform_error', + 'adoption_failed', + 'isolated', ]), }), 'config_entry_id': , @@ -103,19 +102,18 @@ 'device_class': 'enum', 'friendly_name': 'Device State', 'options': list([ - 'Disconnected', - 'Connected', - 'Pending', - 'Firmware Mismatch', - 'Upgrading', - 'Provisioning', - 'Heartbeat Missed', - 'Adopting', - 'Deleting', - 'Inform Error', - 'Adoption Failed', - 'Isolated', - 'Unknown', + 'disconnected', + 'connected', + 'pending', + 'firmware_mismatch', + 'upgrading', + 'provisioning', + 'heartbeat_missed', + 'adopting', + 'deleting', + 'inform_error', + 'adoption_failed', + 'isolated', ]), }), 'context': , @@ -123,7 +121,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'Connected', + 'state': 'connected', }) # --- # name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_temperature-entry] @@ -536,19 +534,18 @@ 'area_id': None, 'capabilities': dict({ 'options': list([ - 'Disconnected', - 'Connected', - 'Pending', - 'Firmware Mismatch', - 'Upgrading', - 'Provisioning', - 'Heartbeat Missed', - 'Adopting', - 'Deleting', - 'Inform Error', - 'Adoption Failed', - 'Isolated', - 'Unknown', + 'disconnected', + 'connected', + 'pending', + 'firmware_mismatch', + 'upgrading', + 'provisioning', + 
'heartbeat_missed', + 'adopting', + 'deleting', + 'inform_error', + 'adoption_failed', + 'isolated', ]), }), 'config_entry_id': , @@ -584,19 +581,18 @@ 'device_class': 'enum', 'friendly_name': 'Dummy USP-PDU-Pro State', 'options': list([ - 'Disconnected', - 'Connected', - 'Pending', - 'Firmware Mismatch', - 'Upgrading', - 'Provisioning', - 'Heartbeat Missed', - 'Adopting', - 'Deleting', - 'Inform Error', - 'Adoption Failed', - 'Isolated', - 'Unknown', + 'disconnected', + 'connected', + 'pending', + 'firmware_mismatch', + 'upgrading', + 'provisioning', + 'heartbeat_missed', + 'adopting', + 'deleting', + 'inform_error', + 'adoption_failed', + 'isolated', ]), }), 'context': , @@ -604,7 +600,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'Connected', + 'state': 'connected', }) # --- # name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_uptime-entry] @@ -1601,19 +1597,18 @@ 'area_id': None, 'capabilities': dict({ 'options': list([ - 'Disconnected', - 'Connected', - 'Pending', - 'Firmware Mismatch', - 'Upgrading', - 'Provisioning', - 'Heartbeat Missed', - 'Adopting', - 'Deleting', - 'Inform Error', - 'Adoption Failed', - 'Isolated', - 'Unknown', + 'disconnected', + 'connected', + 'pending', + 'firmware_mismatch', + 'upgrading', + 'provisioning', + 'heartbeat_missed', + 'adopting', + 'deleting', + 'inform_error', + 'adoption_failed', + 'isolated', ]), }), 'config_entry_id': , @@ -1649,19 +1644,18 @@ 'device_class': 'enum', 'friendly_name': 'mock-name State', 'options': list([ - 'Disconnected', - 'Connected', - 'Pending', - 'Firmware Mismatch', - 'Upgrading', - 'Provisioning', - 'Heartbeat Missed', - 'Adopting', - 'Deleting', - 'Inform Error', - 'Adoption Failed', - 'Isolated', - 'Unknown', + 'disconnected', + 'connected', + 'pending', + 'firmware_mismatch', + 'upgrading', + 'provisioning', + 'heartbeat_missed', + 'adopting', + 'deleting', + 'inform_error', + 
'adoption_failed', + 'isolated', ]), }), 'context': , @@ -1669,7 +1663,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'Connected', + 'state': 'connected', }) # --- # name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_uptime-entry] diff --git a/tests/components/unifi/test_sensor.py b/tests/components/unifi/test_sensor.py index 3c94d12018d..5e47d263079 100644 --- a/tests/components/unifi/test_sensor.py +++ b/tests/components/unifi/test_sensor.py @@ -30,6 +30,7 @@ from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import ( ATTR_DEVICE_CLASS, STATE_UNAVAILABLE, + STATE_UNKNOWN, EntityCategory, Platform, ) @@ -891,7 +892,9 @@ async def test_device_state( for i in list(map(int, DeviceState)): device["state"] = i mock_websocket_message(message=MessageKey.DEVICE, data=device) - assert hass.states.get("sensor.device_state").state == DEVICE_STATES[i] + assert hass.states.get("sensor.device_state").state == DEVICE_STATES.get( + i, STATE_UNKNOWN + ) @pytest.mark.parametrize( From fce6d6246f85928281369b28de7369e4c8317234 Mon Sep 17 00:00:00 2001 From: Philip Baylas Date: Wed, 18 Dec 2024 14:07:03 +0000 Subject: [PATCH 422/677] Change log level of connection failure to info (#132625) Co-authored-by: Franck Nijhof --- homeassistant/components/plex/server.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/homeassistant/components/plex/server.py b/homeassistant/components/plex/server.py index 0716b3606af..eab1d086d4c 100644 --- a/homeassistant/components/plex/server.py +++ b/homeassistant/components/plex/server.py @@ -425,9 +425,7 @@ class PlexServer: client = resource.connect(timeout=3) _LOGGER.debug("Resource connection successful to plex.tv: %s", client) except NotFound: - _LOGGER.error( - "Resource connection failed to plex.tv: %s", resource.name - ) + _LOGGER.info("Resource connection failed to plex.tv: %s", resource.name) 
else: client.proxyThroughServer(value=False, server=self._plex_server) self._client_device_cache[client.machineIdentifier] = client From 1e075cdac757115db8b2d0ae0444ea4a39112eca Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Wed, 18 Dec 2024 15:21:17 +0100 Subject: [PATCH 423/677] Add diagnostics to slide_local (#133488) --- .../components/slide_local/diagnostics.py | 27 +++++++++++++ .../components/slide_local/quality_scale.yaml | 2 +- .../snapshots/test_diagnostics.ambr | 39 +++++++++++++++++++ .../slide_local/test_diagnostics.py | 34 ++++++++++++++++ 4 files changed, 101 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/slide_local/diagnostics.py create mode 100644 tests/components/slide_local/snapshots/test_diagnostics.ambr create mode 100644 tests/components/slide_local/test_diagnostics.py diff --git a/homeassistant/components/slide_local/diagnostics.py b/homeassistant/components/slide_local/diagnostics.py new file mode 100644 index 00000000000..2655cb5fada --- /dev/null +++ b/homeassistant/components/slide_local/diagnostics.py @@ -0,0 +1,27 @@ +"""Provides diagnostics for slide_local.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.const import CONF_PASSWORD +from homeassistant.core import HomeAssistant + +from . 
import SlideConfigEntry + +TO_REDACT = [ + CONF_PASSWORD, +] + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: SlideConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + data = config_entry.runtime_data.data + + return { + "config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT), + "slide_data": data, + } diff --git a/homeassistant/components/slide_local/quality_scale.yaml b/homeassistant/components/slide_local/quality_scale.yaml index 4eda62f6497..887b90b6b11 100644 --- a/homeassistant/components/slide_local/quality_scale.yaml +++ b/homeassistant/components/slide_local/quality_scale.yaml @@ -43,7 +43,7 @@ rules: entity-disabled-by-default: done discovery: done stale-devices: todo - diagnostics: todo + diagnostics: done exception-translations: done icon-translations: todo reconfiguration-flow: todo diff --git a/tests/components/slide_local/snapshots/test_diagnostics.ambr b/tests/components/slide_local/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..63dab3f5a66 --- /dev/null +++ b/tests/components/slide_local/snapshots/test_diagnostics.ambr @@ -0,0 +1,39 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'config_entry': dict({ + 'data': dict({ + 'api_version': 2, + 'host': '127.0.0.2', + 'mac': '12:34:56:78:90:ab', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'slide_local', + 'entry_id': 'ce5f5431554d101905d31797e1232da8', + 'minor_version': 1, + 'options': dict({ + 'invert_position': False, + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'slide', + 'unique_id': '12:34:56:78:90:ab', + 'version': 1, + }), + 'slide_data': dict({ + 'board_rev': 1, + 'calib_time': 10239, + 'curtain_type': 0, + 'device_name': 'slide bedroom', + 'mac': '1234567890ab', + 'pos': 0, + 'slide_id': 'slide_1234567890ab', + 'state': 'open', + 'touch_go': True, + 'zone_name': 'bedroom', + 
}), + }) +# --- diff --git a/tests/components/slide_local/test_diagnostics.py b/tests/components/slide_local/test_diagnostics.py new file mode 100644 index 00000000000..3e11af378c5 --- /dev/null +++ b/tests/components/slide_local/test_diagnostics.py @@ -0,0 +1,34 @@ +"""Test slide_local diagnostics.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from . import setup_platform + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + mock_slide_api: AsyncMock, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + await setup_platform( + hass, mock_config_entry, [Platform.BUTTON, Platform.COVER, Platform.SWITCH] + ) + + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + + assert result == snapshot(exclude=props("created_at", "modified_at")) From c9f1829c0bf0b0cad5427614b40f1bc4aadd2c4f Mon Sep 17 00:00:00 2001 From: mkmer Date: Wed, 18 Dec 2024 09:27:40 -0500 Subject: [PATCH 424/677] Add (de)humidifier platform to Honeywell (#132287) Co-authored-by: Joost Lekkerkerker --- .../components/honeywell/__init__.py | 2 +- .../components/honeywell/humidifier.py | 136 ++++++++++++++++++ .../components/honeywell/strings.json | 8 ++ tests/components/honeywell/__init__.py | 2 +- tests/components/honeywell/conftest.py | 23 ++- .../honeywell/snapshots/test_humidity.ambr | 39 +++++ tests/components/honeywell/test_climate.py | 2 +- tests/components/honeywell/test_humidity.py | 110 ++++++++++++++ 8 files changed, 318 insertions(+), 4 deletions(-) create mode 100644 
homeassistant/components/honeywell/humidifier.py create mode 100644 tests/components/honeywell/snapshots/test_humidity.ambr create mode 100644 tests/components/honeywell/test_humidity.py diff --git a/homeassistant/components/honeywell/__init__.py b/homeassistant/components/honeywell/__init__.py index a8ee5975914..eb89ba2a681 100644 --- a/homeassistant/components/honeywell/__init__.py +++ b/homeassistant/components/honeywell/__init__.py @@ -22,7 +22,7 @@ from .const import ( ) UPDATE_LOOP_SLEEP_TIME = 5 -PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.SWITCH] +PLATFORMS = [Platform.CLIMATE, Platform.HUMIDIFIER, Platform.SENSOR, Platform.SWITCH] MIGRATE_OPTIONS_KEYS = {CONF_COOL_AWAY_TEMPERATURE, CONF_HEAT_AWAY_TEMPERATURE} diff --git a/homeassistant/components/honeywell/humidifier.py b/homeassistant/components/honeywell/humidifier.py new file mode 100644 index 00000000000..e94ba465c30 --- /dev/null +++ b/homeassistant/components/honeywell/humidifier.py @@ -0,0 +1,136 @@ +"""Support for Honeywell (de)humidifiers.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any + +from aiosomecomfort.device import Device + +from homeassistant.components.humidifier import ( + HumidifierDeviceClass, + HumidifierEntity, + HumidifierEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import HoneywellConfigEntry +from .const import DOMAIN + +HUMIDIFIER_KEY = "humidifier" +DEHUMIDIFIER_KEY = "dehumidifier" + + +@dataclass(frozen=True, kw_only=True) +class HoneywellHumidifierEntityDescription(HumidifierEntityDescription): + """Describes a Honeywell humidifier entity.""" + + current_humidity: Callable[[Device], Any] + current_set_humidity: Callable[[Device], Any] + max_humidity: Callable[[Device], Any] + min_humidity: Callable[[Device], Any] + set_humidity: Callable[[Device, Any], Any] + mode: Callable[[Device], Any] + off: Callable[[Device], Any] + on: Callable[[Device], Any] + + +HUMIDIFIERS: dict[str, HoneywellHumidifierEntityDescription] = { + "Humidifier": HoneywellHumidifierEntityDescription( + key=HUMIDIFIER_KEY, + translation_key=HUMIDIFIER_KEY, + current_humidity=lambda device: device.current_humidity, + set_humidity=lambda device, humidity: device.set_humidifier_setpoint(humidity), + min_humidity=lambda device: device.humidifier_lower_limit, + max_humidity=lambda device: device.humidifier_upper_limit, + current_set_humidity=lambda device: device.humidifier_setpoint, + mode=lambda device: device.humidifier_mode, + off=lambda device: device.set_humidifier_off(), + on=lambda device: device.set_humidifier_auto(), + device_class=HumidifierDeviceClass.HUMIDIFIER, + ), + "Dehumidifier": HoneywellHumidifierEntityDescription( + key=DEHUMIDIFIER_KEY, + translation_key=DEHUMIDIFIER_KEY, + current_humidity=lambda device: device.current_humidity, + set_humidity=lambda device, humidity: device.set_dehumidifier_setpoint( + humidity + ), + min_humidity=lambda device: device.dehumidifier_lower_limit, + max_humidity=lambda device: device.dehumidifier_upper_limit, + current_set_humidity=lambda device: device.dehumidifier_setpoint, + mode=lambda device: device.dehumidifier_mode, + off=lambda device: device.set_dehumidifier_off(), + on=lambda device: device.set_dehumidifier_auto(), + device_class=HumidifierDeviceClass.DEHUMIDIFIER, + ), +} + + +async def 
async_setup_entry( + hass: HomeAssistant, + config_entry: HoneywellConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Honeywell (de)humidifier dynamically.""" + data = config_entry.runtime_data + entities: list = [] + for device in data.devices.values(): + if device.has_humidifier: + entities.append(HoneywellHumidifier(device, HUMIDIFIERS["Humidifier"])) + if device.has_dehumidifier: + entities.append(HoneywellHumidifier(device, HUMIDIFIERS["Dehumidifier"])) + + async_add_entities(entities) + + +class HoneywellHumidifier(HumidifierEntity): + """Representation of a Honeywell US (De)Humidifier.""" + + entity_description: HoneywellHumidifierEntityDescription + _attr_has_entity_name = True + + def __init__( + self, device: Device, description: HoneywellHumidifierEntityDescription + ) -> None: + """Initialize the (De)Humidifier.""" + self._device = device + self.entity_description = description + self._attr_unique_id = f"{device.deviceid}_{description.key}" + self._attr_min_humidity = description.min_humidity(device) + self._attr_max_humidity = description.max_humidity(device) + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, device.deviceid)}, + name=device.name, + manufacturer="Honeywell", + ) + + @property + def is_on(self) -> bool: + """Return the device is on or off.""" + return self.entity_description.mode(self._device) != 0 + + @property + def target_humidity(self) -> int | None: + """Return the humidity we try to reach.""" + return self.entity_description.current_set_humidity(self._device) + + @property + def current_humidity(self) -> int | None: + """Return the current humidity.""" + return self.entity_description.current_humidity(self._device) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the device on.""" + await self.entity_description.on(self._device) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the device off.""" + await self.entity_description.off(self._device) + + 
async def async_set_humidity(self, humidity: int) -> None: + """Set new target humidity.""" + await self.entity_description.set_humidity(self._device, humidity) diff --git a/homeassistant/components/honeywell/strings.json b/homeassistant/components/honeywell/strings.json index a64f1a6fce0..2538e7101a1 100644 --- a/homeassistant/components/honeywell/strings.json +++ b/homeassistant/components/honeywell/strings.json @@ -61,6 +61,14 @@ } } } + }, + "humidifier": { + "humidifier": { + "name": "[%key:component::humidifier::title%]" + }, + "dehumidifier": { + "name": "[%key:component::humidifier::entity_component::dehumidifier::name%]" + } } }, "exceptions": { diff --git a/tests/components/honeywell/__init__.py b/tests/components/honeywell/__init__.py index 98fcaa551bf..94022667e0e 100644 --- a/tests/components/honeywell/__init__.py +++ b/tests/components/honeywell/__init__.py @@ -1,4 +1,4 @@ -"""Tests for honeywell component.""" +"""Tests for Honeywell component.""" from unittest.mock import MagicMock diff --git a/tests/components/honeywell/conftest.py b/tests/components/honeywell/conftest.py index e48664db9ae..dd3341aa75c 100644 --- a/tests/components/honeywell/conftest.py +++ b/tests/components/honeywell/conftest.py @@ -127,7 +127,16 @@ def device(): mock_device.refresh = AsyncMock() mock_device.heat_away_temp = HEATAWAY mock_device.cool_away_temp = COOLAWAY - + mock_device.has_humidifier = False + mock_device.has_dehumidifier = False + mock_device.humidifier_upper_limit = 60 + mock_device.humidifier_lower_limit = 10 + mock_device.humidifier_setpoint = 20 + mock_device.dehumidifier_mode = 1 + mock_device.dehumidifier_upper_limit = 55 + mock_device.dehumidifier_lower_limit = 15 + mock_device.dehumidifier_setpoint = 30 + mock_device.dehumidifier_mode = 1 mock_device.raw_dr_data = {"CoolSetpLimit": None, "HeatSetpLimit": None} return mock_device @@ -149,6 +158,8 @@ def device_with_outdoor_sensor(): mock_device.temperature_unit = "C" mock_device.outdoor_temperature = 
OUTDOORTEMP mock_device.outdoor_humidity = OUTDOORHUMIDITY + mock_device.has_humidifier = False + mock_device.has_dehumidifier = False mock_device.raw_ui_data = { "SwitchOffAllowed": True, "SwitchAutoAllowed": True, @@ -188,6 +199,16 @@ def another_device(): mock_device.mac_address = "macaddress1" mock_device.outdoor_temperature = None mock_device.outdoor_humidity = None + mock_device.has_humidifier = False + mock_device.has_dehumidifier = False + mock_device.humidifier_upper_limit = 60 + mock_device.humidifier_lower_limit = 10 + mock_device.humidifier_setpoint = 20 + mock_device.dehumidifier_mode = 1 + mock_device.dehumidifier_upper_limit = 55 + mock_device.dehumidifier_lower_limit = 15 + mock_device.dehumidifier_setpoint = 30 + mock_device.dehumidifier_mode = 1 mock_device.raw_ui_data = { "SwitchOffAllowed": True, "SwitchAutoAllowed": True, diff --git a/tests/components/honeywell/snapshots/test_humidity.ambr b/tests/components/honeywell/snapshots/test_humidity.ambr new file mode 100644 index 00000000000..369167b8c1e --- /dev/null +++ b/tests/components/honeywell/snapshots/test_humidity.ambr @@ -0,0 +1,39 @@ +# serializer version: 1 +# name: test_static_attributes[dehumidifier] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_humidity': 50, + 'device_class': 'dehumidifier', + 'friendly_name': 'device1 Dehumidifier', + 'humidity': 30, + 'max_humidity': 55, + 'min_humidity': 15, + 'supported_features': , + }), + 'context': , + 'entity_id': 'humidifier.device1_dehumidifier', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_static_attributes[humidifier] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_humidity': 50, + 'device_class': 'humidifier', + 'friendly_name': 'device1 Humidifier', + 'humidity': 20, + 'max_humidity': 60, + 'min_humidity': 10, + 'supported_features': , + }), + 'context': , + 'entity_id': 'humidifier.device1_humidifier', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/honeywell/test_climate.py b/tests/components/honeywell/test_climate.py index 73c5ff33dbc..57cdfaa9a23 100644 --- a/tests/components/honeywell/test_climate.py +++ b/tests/components/honeywell/test_climate.py @@ -1,4 +1,4 @@ -"""Test the Whirlpool Sixth Sense climate domain.""" +"""Test the Honeywell climate domain.""" import datetime from unittest.mock import MagicMock diff --git a/tests/components/honeywell/test_humidity.py b/tests/components/honeywell/test_humidity.py new file mode 100644 index 00000000000..2e1f8cec6aa --- /dev/null +++ b/tests/components/honeywell/test_humidity.py @@ -0,0 +1,110 @@ +"""Test the Honeywell humidity domain.""" + +from unittest.mock import MagicMock + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.humidifier import ( + ATTR_HUMIDITY, + DOMAIN as HUMIDIFIER_DOMAIN, + SERVICE_SET_HUMIDITY, +) +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import init_integration + + +async def test_humidifier_service_calls( + hass: HomeAssistant, device: MagicMock, config_entry: MagicMock +) -> None: + """Test the setup of the climate entities when there are no additional options available.""" + device.has_humidifier = True + await init_integration(hass, config_entry) + entity_id = f"humidifier.{device.name}_humidifier" + assert hass.states.get(f"humidifier.{device.name}_dehumidifier") is None + + await hass.services.async_call( + HUMIDIFIER_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + device.set_humidifier_auto.assert_called_once() + + await hass.services.async_call( + HUMIDIFIER_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + device.set_humidifier_off.assert_called_once() + + await hass.services.async_call( + HUMIDIFIER_DOMAIN, + SERVICE_SET_HUMIDITY, + {ATTR_ENTITY_ID: entity_id, ATTR_HUMIDITY: 40}, + blocking=True, + ) + device.set_humidifier_setpoint.assert_called_once_with(40) + + +async def test_dehumidifier_service_calls( + hass: HomeAssistant, device: MagicMock, config_entry: MagicMock +) -> None: + """Test the setup of the climate entities when there are no additional options available.""" + device.has_dehumidifier = True + await init_integration(hass, config_entry) + entity_id = f"humidifier.{device.name}_dehumidifier" + assert hass.states.get(f"humidifier.{device.name}_humidifier") is None + + await hass.services.async_call( + HUMIDIFIER_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + device.set_dehumidifier_auto.assert_called_once() + + await hass.services.async_call( + HUMIDIFIER_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + device.set_dehumidifier_off.assert_called_once() + + await hass.services.async_call( + HUMIDIFIER_DOMAIN, + SERVICE_SET_HUMIDITY, + {ATTR_ENTITY_ID: entity_id, ATTR_HUMIDITY: 40}, + blocking=True, + ) + 
device.set_dehumidifier_setpoint.assert_called_once_with(40) + + +async def test_static_attributes( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device: MagicMock, + config_entry: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test static humidifier attributes.""" + device.has_dehumidifier = True + device.has_humidifier = True + await init_integration(hass, config_entry) + + entity_id_dehumidifier = f"humidifier.{device.name}_dehumidifier" + entity_id_humidifier = f"humidifier.{device.name}_humidifier" + entry = entity_registry.async_get(entity_id_dehumidifier) + assert entry + + state = hass.states.get(entity_id_dehumidifier) + + assert state == snapshot(name="dehumidifier") + + state = hass.states.get(entity_id_humidifier) + + assert state == snapshot(name="humidifier") From d6c201de4aa1825d02369535305f6620aa63eed8 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Wed, 18 Dec 2024 15:33:11 +0100 Subject: [PATCH 425/677] Add exceptions and translations for slide_local (#133490) --- .../components/slide_local/button.py | 24 ++++++++++- .../components/slide_local/quality_scale.yaml | 4 +- .../components/slide_local/strings.json | 6 +++ .../components/slide_local/switch.py | 43 +++++++++++++++++-- tests/components/slide_local/test_button.py | 42 ++++++++++++++++++ tests/components/slide_local/test_switch.py | 42 ++++++++++++++++++ 6 files changed, 153 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/slide_local/button.py b/homeassistant/components/slide_local/button.py index 9c285881116..795cd4f1c2e 100644 --- a/homeassistant/components/slide_local/button.py +++ b/homeassistant/components/slide_local/button.py @@ -2,16 +2,25 @@ from __future__ import annotations +from goslideapi.goslideapi import ( + AuthenticationFailed, + ClientConnectionError, + ClientTimeoutError, + DigestAuthCalcError, +) + from homeassistant.components.button import ButtonEntity from 
homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import SlideConfigEntry +from .const import DOMAIN from .coordinator import SlideCoordinator from .entity import SlideEntity -PARALLEL_UPDATES = 0 +PARALLEL_UPDATES = 1 async def async_setup_entry( @@ -39,4 +48,15 @@ class SlideButton(SlideEntity, ButtonEntity): async def async_press(self) -> None: """Send out a calibrate command.""" - await self.coordinator.slide.slide_calibrate(self.coordinator.host) + try: + await self.coordinator.slide.slide_calibrate(self.coordinator.host) + except ( + ClientConnectionError, + AuthenticationFailed, + ClientTimeoutError, + DigestAuthCalcError, + ) as ex: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="calibration_error", + ) from ex diff --git a/homeassistant/components/slide_local/quality_scale.yaml b/homeassistant/components/slide_local/quality_scale.yaml index 887b90b6b11..4833f19e2b2 100644 --- a/homeassistant/components/slide_local/quality_scale.yaml +++ b/homeassistant/components/slide_local/quality_scale.yaml @@ -25,9 +25,7 @@ rules: config-entry-unloading: done log-when-unavailable: done entity-unavailable: done - action-exceptions: - status: exempt - comment: No custom action. + action-exceptions: done reauthentication-flow: todo parallel-updates: done test-coverage: todo diff --git a/homeassistant/components/slide_local/strings.json b/homeassistant/components/slide_local/strings.json index 24c03d2ff96..6aeda9f92fd 100644 --- a/homeassistant/components/slide_local/strings.json +++ b/homeassistant/components/slide_local/strings.json @@ -54,6 +54,12 @@ } }, "exceptions": { + "calibration_error": { + "message": "Error while sending the calibration request to the device." 
+ }, + "touchgo_error": { + "message": "Error while sending the request setting Touch&Go to {state} to the device." + }, "update_error": { "message": "Error while updating data from the API." } diff --git a/homeassistant/components/slide_local/switch.py b/homeassistant/components/slide_local/switch.py index 6d357864c48..f1c33f9a76f 100644 --- a/homeassistant/components/slide_local/switch.py +++ b/homeassistant/components/slide_local/switch.py @@ -4,16 +4,25 @@ from __future__ import annotations from typing import Any +from goslideapi.goslideapi import ( + AuthenticationFailed, + ClientConnectionError, + ClientTimeoutError, + DigestAuthCalcError, +) + from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import SlideConfigEntry +from .const import DOMAIN from .coordinator import SlideCoordinator from .entity import SlideEntity -PARALLEL_UPDATES = 0 +PARALLEL_UPDATES = 1 async def async_setup_entry( @@ -47,10 +56,38 @@ class SlideSwitch(SlideEntity, SwitchEntity): async def async_turn_off(self, **kwargs: Any) -> None: """Turn off touchgo.""" - await self.coordinator.slide.slide_set_touchgo(self.coordinator.host, False) + try: + await self.coordinator.slide.slide_set_touchgo(self.coordinator.host, False) + except ( + ClientConnectionError, + AuthenticationFailed, + ClientTimeoutError, + DigestAuthCalcError, + ) as ex: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="touchgo_error", + translation_placeholders={ + "state": "off", + }, + ) from ex await self.coordinator.async_request_refresh() async def async_turn_on(self, **kwargs: Any) -> None: """Turn on touchgo.""" - await self.coordinator.slide.slide_set_touchgo(self.coordinator.host, True) + try: + await 
self.coordinator.slide.slide_set_touchgo(self.coordinator.host, True) + except ( + ClientConnectionError, + AuthenticationFailed, + ClientTimeoutError, + DigestAuthCalcError, + ) as ex: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="touchgo_error", + translation_placeholders={ + "state": "on", + }, + ) from ex await self.coordinator.async_request_refresh() diff --git a/tests/components/slide_local/test_button.py b/tests/components/slide_local/test_button.py index 646c8fd7ef3..c232affbb99 100644 --- a/tests/components/slide_local/test_button.py +++ b/tests/components/slide_local/test_button.py @@ -2,11 +2,19 @@ from unittest.mock import AsyncMock +from goslideapi.goslideapi import ( + AuthenticationFailed, + ClientConnectionError, + ClientTimeoutError, + DigestAuthCalcError, +) +import pytest from syrupy import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from . 
import setup_platform @@ -44,3 +52,37 @@ async def test_pressing_button( blocking=True, ) mock_slide_api.slide_calibrate.assert_called_once() + + +@pytest.mark.parametrize( + ("exception"), + [ + ClientConnectionError, + ClientTimeoutError, + AuthenticationFailed, + DigestAuthCalcError, + ], +) +async def test_pressing_button_exception( + hass: HomeAssistant, + exception: Exception, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test pressing button.""" + await setup_platform(hass, mock_config_entry, [Platform.BUTTON]) + + mock_slide_api.slide_calibrate.side_effect = exception + + with pytest.raises( + HomeAssistantError, + match="Error while sending the calibration request to the device", + ): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: "button.slide_bedroom_calibrate", + }, + blocking=True, + ) diff --git a/tests/components/slide_local/test_switch.py b/tests/components/slide_local/test_switch.py index 0ac9820ca10..9d0d8274aa5 100644 --- a/tests/components/slide_local/test_switch.py +++ b/tests/components/slide_local/test_switch.py @@ -2,6 +2,12 @@ from unittest.mock import AsyncMock +from goslideapi.goslideapi import ( + AuthenticationFailed, + ClientConnectionError, + ClientTimeoutError, + DigestAuthCalcError, +) import pytest from syrupy import SnapshotAssertion @@ -13,6 +19,7 @@ from homeassistant.components.switch import ( ) from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from . 
import setup_platform @@ -59,3 +66,38 @@ async def test_services( blocking=True, ) mock_slide_api.slide_set_touchgo.assert_called_once() + + +@pytest.mark.parametrize( + ("exception", "service"), + [ + (ClientConnectionError, SERVICE_TURN_OFF), + (ClientTimeoutError, SERVICE_TURN_ON), + (AuthenticationFailed, SERVICE_TURN_OFF), + (DigestAuthCalcError, SERVICE_TURN_ON), + ], +) +async def test_service_exception( + hass: HomeAssistant, + exception: Exception, + service: str, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test pressing button.""" + await setup_platform(hass, mock_config_entry, [Platform.SWITCH]) + + mock_slide_api.slide_set_touchgo.side_effect = exception + + with pytest.raises( + HomeAssistantError, + match=f"Error while sending the request setting Touch&Go to {service[5:]} to the device", + ): + await hass.services.async_call( + SWITCH_DOMAIN, + service, + { + ATTR_ENTITY_ID: "switch.slide_bedroom_touchgo", + }, + blocking=True, + ) From f46e764982f9dc0b67b564f10055cc0a510ddeba Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Wed, 18 Dec 2024 10:06:48 -0500 Subject: [PATCH 426/677] Update quality scale for Russound RIO (#133093) --- .../russound_rio/quality_scale.yaml | 22 ++++++++++++------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/russound_rio/quality_scale.yaml b/homeassistant/components/russound_rio/quality_scale.yaml index 6edf439cae6..bd511802467 100644 --- a/homeassistant/components/russound_rio/quality_scale.yaml +++ b/homeassistant/components/russound_rio/quality_scale.yaml @@ -18,8 +18,8 @@ rules: comment: | This integration does not provide additional actions. 
docs-high-level-description: done - docs-installation-instructions: todo - docs-removal-instructions: todo + docs-installation-instructions: done + docs-removal-instructions: done entity-event-setup: done entity-unique-id: done has-entity-name: done @@ -40,7 +40,7 @@ rules: parallel-updates: done test-coverage: done integration-owner: done - docs-installation-parameters: todo + docs-installation-parameters: done docs-configuration-parameters: status: exempt comment: | @@ -61,17 +61,23 @@ rules: stale-devices: todo diagnostics: done exception-translations: done - icon-translations: todo + icon-translations: + status: exempt + comment: | + There are no entities that require icons. reconfiguration-flow: done dynamic-devices: todo discovery-update-info: todo repair-issues: done - docs-use-cases: todo + docs-use-cases: done docs-supported-devices: done docs-supported-functions: todo - docs-data-update: todo - docs-known-limitations: todo - docs-troubleshooting: todo + docs-data-update: done + docs-known-limitations: + status: exempt + comment: | + There are no known limitations beyond the push API delay noted in Troubleshooting. 
+ docs-troubleshooting: done docs-examples: todo # Platinum From 2564533dae2319a790c002e736ee163f8634a26c Mon Sep 17 00:00:00 2001 From: Luke Lashley Date: Wed, 18 Dec 2024 10:22:39 -0500 Subject: [PATCH 427/677] Update Roborock to 2.8.1 (#133492) --- homeassistant/components/roborock/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/roborock/manifest.json b/homeassistant/components/roborock/manifest.json index c305e4710fc..69d867aa164 100644 --- a/homeassistant/components/roborock/manifest.json +++ b/homeassistant/components/roborock/manifest.json @@ -7,7 +7,7 @@ "iot_class": "local_polling", "loggers": ["roborock"], "requirements": [ - "python-roborock==2.7.2", + "python-roborock==2.8.1", "vacuum-map-parser-roborock==0.1.2" ] } diff --git a/requirements_all.txt b/requirements_all.txt index a6316379d8f..6336205eed3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2415,7 +2415,7 @@ python-rabbitair==0.0.8 python-ripple-api==0.0.3 # homeassistant.components.roborock -python-roborock==2.7.2 +python-roborock==2.8.1 # homeassistant.components.smarttub python-smarttub==0.0.38 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 33e7327568e..e8e131a5bd5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1942,7 +1942,7 @@ python-picnic-api==1.1.0 python-rabbitair==0.0.8 # homeassistant.components.roborock -python-roborock==2.7.2 +python-roborock==2.8.1 # homeassistant.components.smarttub python-smarttub==0.0.38 From a1558213c49871a955a3dec440cc1984b143615e Mon Sep 17 00:00:00 2001 From: Joakim Plate Date: Wed, 18 Dec 2024 16:53:15 +0100 Subject: [PATCH 428/677] =?UTF-8?q?Update=20fj=C3=A4r=C3=A5skupan=20to=202?= =?UTF-8?q?.3.1=20(#133493)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- homeassistant/components/fjaraskupan/manifest.json | 2 +- 
requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/fjaraskupan/manifest.json b/homeassistant/components/fjaraskupan/manifest.json index 91c74b68e01..cc368b3e92f 100644 --- a/homeassistant/components/fjaraskupan/manifest.json +++ b/homeassistant/components/fjaraskupan/manifest.json @@ -14,5 +14,5 @@ "documentation": "https://www.home-assistant.io/integrations/fjaraskupan", "iot_class": "local_polling", "loggers": ["bleak", "fjaraskupan"], - "requirements": ["fjaraskupan==2.3.0"] + "requirements": ["fjaraskupan==2.3.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 6336205eed3..47929f65916 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -915,7 +915,7 @@ fivem-api==0.1.2 fixerio==1.0.0a0 # homeassistant.components.fjaraskupan -fjaraskupan==2.3.0 +fjaraskupan==2.3.1 # homeassistant.components.flexit_bacnet flexit_bacnet==2.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index e8e131a5bd5..3b55231f898 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -774,7 +774,7 @@ fitbit==0.3.1 fivem-api==0.1.2 # homeassistant.components.fjaraskupan -fjaraskupan==2.3.0 +fjaraskupan==2.3.1 # homeassistant.components.flexit_bacnet flexit_bacnet==2.2.1 From 5516f3609d2f282a96a487fd9fee45e7d0329624 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 18 Dec 2024 17:35:11 +0100 Subject: [PATCH 429/677] Rename strategy backup to automatic backup (#133489) * Rename strategy backup to automatic backup * Update homeassistant/components/backup/config.py Co-authored-by: Martin Hjelmare --------- Co-authored-by: Martin Hjelmare --- homeassistant/components/backup/config.py | 38 +-- homeassistant/components/backup/manager.py | 52 ++-- homeassistant/components/backup/websocket.py | 12 +- .../backup/snapshots/test_backup.ambr | 22 +- .../backup/snapshots/test_websocket.ambr | 294 +++++++++--------- 
tests/components/backup/test_manager.py | 16 +- tests/components/backup/test_websocket.py | 210 ++++++------- tests/components/cloud/test_backup.py | 8 +- tests/components/hassio/test_backup.py | 4 +- tests/components/kitchen_sink/test_backup.py | 4 +- 10 files changed, 330 insertions(+), 330 deletions(-) diff --git a/homeassistant/components/backup/config.py b/homeassistant/components/backup/config.py index ef21dc81ee5..e8d740d2e13 100644 --- a/homeassistant/components/backup/config.py +++ b/homeassistant/components/backup/config.py @@ -33,8 +33,8 @@ class StoredBackupConfig(TypedDict): """Represent the stored backup config.""" create_backup: StoredCreateBackupConfig - last_attempted_strategy_backup: str | None - last_completed_strategy_backup: str | None + last_attempted_automatic_backup: str | None + last_completed_automatic_backup: str | None retention: StoredRetentionConfig schedule: StoredBackupSchedule @@ -44,8 +44,8 @@ class BackupConfigData: """Represent loaded backup config data.""" create_backup: CreateBackupConfig - last_attempted_strategy_backup: datetime | None = None - last_completed_strategy_backup: datetime | None = None + last_attempted_automatic_backup: datetime | None = None + last_completed_automatic_backup: datetime | None = None retention: RetentionConfig schedule: BackupSchedule @@ -59,12 +59,12 @@ class BackupConfigData: include_folders = None retention = data["retention"] - if last_attempted_str := data["last_attempted_strategy_backup"]: + if last_attempted_str := data["last_attempted_automatic_backup"]: last_attempted = dt_util.parse_datetime(last_attempted_str) else: last_attempted = None - if last_attempted_str := data["last_completed_strategy_backup"]: + if last_attempted_str := data["last_completed_automatic_backup"]: last_completed = dt_util.parse_datetime(last_attempted_str) else: last_completed = None @@ -79,8 +79,8 @@ class BackupConfigData: name=data["create_backup"]["name"], password=data["create_backup"]["password"], ), - 
last_attempted_strategy_backup=last_attempted, - last_completed_strategy_backup=last_completed, + last_attempted_automatic_backup=last_attempted, + last_completed_automatic_backup=last_completed, retention=RetentionConfig( copies=retention["copies"], days=retention["days"], @@ -90,20 +90,20 @@ class BackupConfigData: def to_dict(self) -> StoredBackupConfig: """Convert backup config data to a dict.""" - if self.last_attempted_strategy_backup: - last_attempted = self.last_attempted_strategy_backup.isoformat() + if self.last_attempted_automatic_backup: + last_attempted = self.last_attempted_automatic_backup.isoformat() else: last_attempted = None - if self.last_completed_strategy_backup: - last_completed = self.last_completed_strategy_backup.isoformat() + if self.last_completed_automatic_backup: + last_completed = self.last_completed_automatic_backup.isoformat() else: last_completed = None return StoredBackupConfig( create_backup=self.create_backup.to_dict(), - last_attempted_strategy_backup=last_attempted, - last_completed_strategy_backup=last_completed, + last_attempted_automatic_backup=last_attempted, + last_completed_automatic_backup=last_completed, retention=self.retention.to_dict(), schedule=self.schedule.to_dict(), ) @@ -286,7 +286,7 @@ class BackupSchedule: self._unschedule_next(manager) now = dt_util.now() if (cron_event := self.cron_event) is None: - seed_time = manager.config.data.last_completed_strategy_backup or now + seed_time = manager.config.data.last_completed_automatic_backup or now cron_event = self.cron_event = CronSim(cron_pattern, seed_time) next_time = next(cron_event) @@ -316,7 +316,7 @@ class BackupSchedule: include_homeassistant=True, # always include HA name=config_data.create_backup.name, password=config_data.create_backup.password, - with_strategy_settings=True, + with_automatic_settings=True, ) except Exception: # noqa: BLE001 # another more specific exception will be added @@ -404,14 +404,14 @@ async def _delete_filtered_backups( 
get_agent_errors, ) - # only delete backups that are created by the backup strategy + # only delete backups that are created with the saved automatic settings backups = { backup_id: backup for backup_id, backup in backups.items() - if backup.with_strategy_settings + if backup.with_automatic_settings } - LOGGER.debug("Total strategy backups: %s", backups) + LOGGER.debug("Total automatic backups: %s", backups) filtered_backups = backup_filter(backups) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index a9bce8cb03d..e2c4f91730f 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -60,7 +60,7 @@ class ManagerBackup(AgentBackup): agent_ids: list[str] failed_agent_ids: list[str] - with_strategy_settings: bool | None + with_automatic_settings: bool | None @dataclass(frozen=True, kw_only=True, slots=True) @@ -445,10 +445,10 @@ class BackupManager: if (backup_id := agent_backup.backup_id) not in backups: if known_backup := self.known_backups.get(backup_id): failed_agent_ids = known_backup.failed_agent_ids - with_strategy_settings = known_backup.with_strategy_settings + with_automatic_settings = known_backup.with_automatic_settings else: failed_agent_ids = [] - with_strategy_settings = None + with_automatic_settings = None backups[backup_id] = ManagerBackup( agent_ids=[], addons=agent_backup.addons, @@ -462,7 +462,7 @@ class BackupManager: name=agent_backup.name, protected=agent_backup.protected, size=agent_backup.size, - with_strategy_settings=with_strategy_settings, + with_automatic_settings=with_automatic_settings, ) backups[backup_id].agent_ids.append(agent_ids[idx]) @@ -494,10 +494,10 @@ class BackupManager: if backup is None: if known_backup := self.known_backups.get(backup_id): failed_agent_ids = known_backup.failed_agent_ids - with_strategy_settings = known_backup.with_strategy_settings + with_automatic_settings = known_backup.with_automatic_settings else: 
failed_agent_ids = [] - with_strategy_settings = None + with_automatic_settings = None backup = ManagerBackup( agent_ids=[], addons=result.addons, @@ -511,7 +511,7 @@ class BackupManager: name=result.name, protected=result.protected, size=result.size, - with_strategy_settings=with_strategy_settings, + with_automatic_settings=with_automatic_settings, ) backup.agent_ids.append(agent_ids[idx]) @@ -611,7 +611,7 @@ class BackupManager: include_homeassistant: bool, name: str | None, password: str | None, - with_strategy_settings: bool = False, + with_automatic_settings: bool = False, ) -> NewBackup: """Create a backup.""" new_backup = await self.async_initiate_backup( @@ -623,7 +623,7 @@ class BackupManager: include_homeassistant=include_homeassistant, name=name, password=password, - with_strategy_settings=with_strategy_settings, + with_automatic_settings=with_automatic_settings, ) assert self._backup_finish_task await self._backup_finish_task @@ -640,14 +640,14 @@ class BackupManager: include_homeassistant: bool, name: str | None, password: str | None, - with_strategy_settings: bool = False, + with_automatic_settings: bool = False, ) -> NewBackup: """Initiate generating a backup.""" if self.state is not BackupManagerState.IDLE: raise HomeAssistantError(f"Backup manager busy: {self.state}") - if with_strategy_settings: - self.config.data.last_attempted_strategy_backup = dt_util.now() + if with_automatic_settings: + self.config.data.last_attempted_automatic_backup = dt_util.now() self.store.save() self.async_on_backup_event( @@ -663,7 +663,7 @@ class BackupManager: include_homeassistant=include_homeassistant, name=name, password=password, - with_strategy_settings=with_strategy_settings, + with_automatic_settings=with_automatic_settings, ) except Exception: self.async_on_backup_event( @@ -683,7 +683,7 @@ class BackupManager: include_homeassistant: bool, name: str | None, password: str | None, - with_strategy_settings: bool, + with_automatic_settings: bool, ) -> NewBackup: 
"""Initiate generating a backup.""" if not agent_ids: @@ -708,13 +708,13 @@ class BackupManager: password=password, ) self._backup_finish_task = self.hass.async_create_task( - self._async_finish_backup(agent_ids, with_strategy_settings), + self._async_finish_backup(agent_ids, with_automatic_settings), name="backup_manager_finish_backup", ) return new_backup async def _async_finish_backup( - self, agent_ids: list[str], with_strategy_settings: bool + self, agent_ids: list[str], with_automatic_settings: bool ) -> None: if TYPE_CHECKING: assert self._backup_task is not None @@ -743,12 +743,12 @@ class BackupManager: open_stream=written_backup.open_stream, ) await written_backup.release_stream() - if with_strategy_settings: - # create backup was successful, update last_completed_strategy_backup - self.config.data.last_completed_strategy_backup = dt_util.now() + if with_automatic_settings: + # create backup was successful, update last_completed_automatic_backup + self.config.data.last_completed_automatic_backup = dt_util.now() self.store.save() self.known_backups.add( - written_backup.backup, agent_errors, with_strategy_settings + written_backup.backup, agent_errors, with_automatic_settings ) # delete old backups more numerous than copies @@ -870,7 +870,7 @@ class KnownBackups: backup["backup_id"]: KnownBackup( backup_id=backup["backup_id"], failed_agent_ids=backup["failed_agent_ids"], - with_strategy_settings=backup["with_strategy_settings"], + with_automatic_settings=backup["with_automatic_settings"], ) for backup in stored_backups } @@ -883,13 +883,13 @@ class KnownBackups: self, backup: AgentBackup, agent_errors: dict[str, Exception], - with_strategy_settings: bool, + with_automatic_settings: bool, ) -> None: """Add a backup.""" self._backups[backup.backup_id] = KnownBackup( backup_id=backup.backup_id, failed_agent_ids=list(agent_errors), - with_strategy_settings=with_strategy_settings, + with_automatic_settings=with_automatic_settings, ) self._manager.store.save() 
@@ -911,14 +911,14 @@ class KnownBackup: backup_id: str failed_agent_ids: list[str] - with_strategy_settings: bool + with_automatic_settings: bool def to_dict(self) -> StoredKnownBackup: """Convert known backup to a dict.""" return { "backup_id": self.backup_id, "failed_agent_ids": self.failed_agent_ids, - "with_strategy_settings": self.with_strategy_settings, + "with_automatic_settings": self.with_automatic_settings, } @@ -927,7 +927,7 @@ class StoredKnownBackup(TypedDict): backup_id: str failed_agent_ids: list[str] - with_strategy_settings: bool + with_automatic_settings: bool class CoreBackupReaderWriter(BackupReaderWriter): diff --git a/homeassistant/components/backup/websocket.py b/homeassistant/components/backup/websocket.py index 7dacc39f9ba..abe3d372be5 100644 --- a/homeassistant/components/backup/websocket.py +++ b/homeassistant/components/backup/websocket.py @@ -25,7 +25,7 @@ def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) -> websocket_api.async_register_command(hass, handle_details) websocket_api.async_register_command(hass, handle_info) websocket_api.async_register_command(hass, handle_create) - websocket_api.async_register_command(hass, handle_create_with_strategy_settings) + websocket_api.async_register_command(hass, handle_create_with_automatic_settings) websocket_api.async_register_command(hass, handle_delete) websocket_api.async_register_command(hass, handle_restore) websocket_api.async_register_command(hass, handle_subscribe_events) @@ -52,8 +52,8 @@ async def handle_info( agent_id: str(err) for agent_id, err in agent_errors.items() }, "backups": list(backups.values()), - "last_attempted_strategy_backup": manager.config.data.last_attempted_strategy_backup, - "last_completed_strategy_backup": manager.config.data.last_completed_strategy_backup, + "last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup, + "last_completed_automatic_backup": 
manager.config.data.last_completed_automatic_backup, }, ) @@ -181,11 +181,11 @@ async def handle_create( @websocket_api.require_admin @websocket_api.websocket_command( { - vol.Required("type"): "backup/generate_with_strategy_settings", + vol.Required("type"): "backup/generate_with_automatic_settings", } ) @websocket_api.async_response -async def handle_create_with_strategy_settings( +async def handle_create_with_automatic_settings( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any], @@ -202,7 +202,7 @@ async def handle_create_with_strategy_settings( include_homeassistant=True, # always include HA name=config_data.create_backup.name, password=config_data.create_backup.password, - with_strategy_settings=True, + with_automatic_settings=True, ) connection.send_result(msg["id"], backup) diff --git a/tests/components/backup/snapshots/test_backup.ambr b/tests/components/backup/snapshots/test_backup.ambr index 9ef865955fe..8cbf34895f9 100644 --- a/tests/components/backup/snapshots/test_backup.ambr +++ b/tests/components/backup/snapshots/test_backup.ambr @@ -78,11 +78,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -110,8 +110,8 @@ }), 'backups': list([ ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -139,8 +139,8 @@ }), 'backups': list([ ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -168,8 +168,8 @@ }), 'backups': 
list([ ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -197,8 +197,8 @@ }), 'backups': list([ ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index f43a7ed7a2c..58a5162b1bf 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -190,8 +190,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -225,8 +225,8 @@ 'name': 'test-name', 'password': 'test-password', }), - 'last_attempted_strategy_backup': '2024-10-26T04:45:00+01:00', - 'last_completed_strategy_backup': '2024-10-26T04:45:00+01:00', + 'last_attempted_automatic_backup': '2024-10-26T04:45:00+01:00', + 'last_completed_automatic_backup': '2024-10-26T04:45:00+01:00', 'retention': dict({ 'copies': 3, 'days': 7, @@ -256,8 +256,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': 3, 'days': None, @@ -287,8 +287,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': '2024-10-27T04:45:00+01:00', - 'last_completed_strategy_backup': '2024-10-26T04:45:00+01:00', + 'last_attempted_automatic_backup': '2024-10-27T04:45:00+01:00', + 'last_completed_automatic_backup': 
'2024-10-26T04:45:00+01:00', 'retention': dict({ 'copies': None, 'days': 7, @@ -318,8 +318,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -349,8 +349,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -379,8 +379,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -410,8 +410,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': 7, @@ -442,8 +442,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': 7, @@ -473,8 +473,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -504,8 +504,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': 7, 
@@ -536,8 +536,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': 7, @@ -567,8 +567,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -598,8 +598,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -630,8 +630,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -661,8 +661,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -692,8 +692,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -724,8 +724,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -755,8 +755,8 @@ 'name': None, 'password': None, }), - 
'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -786,8 +786,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -818,8 +818,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -849,8 +849,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -884,8 +884,8 @@ 'name': 'test-name', 'password': 'test-password', }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -920,8 +920,8 @@ 'name': 'test-name', 'password': 'test-password', }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -951,8 +951,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -982,8 +982,8 @@ 'name': None, 'password': None, }), - 
'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': 3, 'days': 7, @@ -1014,8 +1014,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': 3, 'days': 7, @@ -1045,8 +1045,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -1076,8 +1076,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -1108,8 +1108,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -1139,8 +1139,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -1170,8 +1170,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': 3, 'days': None, @@ -1202,8 +1202,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 
'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': 3, 'days': None, @@ -1233,8 +1233,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -1264,8 +1264,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': 7, @@ -1296,8 +1296,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': 7, @@ -1327,8 +1327,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -1358,8 +1358,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': 3, 'days': None, @@ -1390,8 +1390,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': 3, 'days': None, @@ -1421,8 +1421,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 
'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -1451,8 +1451,8 @@ 'name': None, 'password': None, }), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, 'retention': dict({ 'copies': None, 'days': None, @@ -1474,8 +1474,8 @@ }), 'backups': list([ ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1500,8 +1500,8 @@ }), 'backups': list([ ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1539,11 +1539,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1568,8 +1568,8 @@ }), 'backups': list([ ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1607,11 +1607,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1660,11 +1660,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': 
None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1697,11 +1697,11 @@ 'name': 'Test 2', 'protected': False, 'size': 1, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1745,11 +1745,11 @@ 'name': 'Test 2', 'protected': False, 'size': 1, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1788,11 +1788,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1841,11 +1841,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1895,11 +1895,11 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 
'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -1950,11 +1950,11 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': False, + 'with_automatic_settings': False, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -2003,11 +2003,11 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -2056,11 +2056,11 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -2109,11 +2109,11 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -2163,11 +2163,11 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_strategy_settings': False, + 'with_automatic_settings': False, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -2216,7 +2216,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 
'with_automatic_settings': None, }), }), 'success': True, @@ -2254,7 +2254,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), }), 'success': True, @@ -2305,7 +2305,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), }), 'success': True, @@ -2344,7 +2344,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), }), 'success': True, @@ -2607,11 +2607,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -2649,11 +2649,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -2692,11 +2692,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -2729,7 +2729,7 @@ 'name': 'Test 2', 'protected': False, 'size': 1, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), dict({ 'addons': list([ @@ -2756,11 +2756,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 
'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', @@ -2799,11 +2799,11 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_strategy_settings': None, + 'with_automatic_settings': None, }), ]), - 'last_attempted_strategy_backup': None, - 'last_completed_strategy_backup': None, + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, }), 'success': True, 'type': 'result', diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index 302f4e07011..a9b4674ad96 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -260,8 +260,8 @@ async def test_async_initiate_backup( assert result["result"] == { "backups": [], "agent_errors": {}, - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": None, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, } await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) @@ -424,8 +424,8 @@ async def test_async_initiate_backup_with_agent_error( assert result["result"] == { "backups": [], "agent_errors": {}, - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": None, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, } await ws_client.send_json_auto_id({"type": "backup/subscribe_events"}) @@ -496,7 +496,7 @@ async def test_async_initiate_backup_with_agent_error( "name": "Core 2025.1.0", "protected": False, "size": 123, - "with_strategy_settings": False, + "with_automatic_settings": False, } await ws_client.send_json_auto_id( @@ -513,8 +513,8 @@ async def test_async_initiate_backup_with_agent_error( assert result["result"] == { "agent_errors": {}, "backups": [expected_backup_data], - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": None, + 
"last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, } await hass.async_block_till_done() @@ -522,7 +522,7 @@ async def test_async_initiate_backup_with_agent_error( { "backup_id": "abc123", "failed_agent_ids": ["test.remote"], - "with_strategy_settings": False, + "with_automatic_settings": False, } ] diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index 665512eca97..1a0e2cc1a81 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -55,8 +55,8 @@ DEFAULT_STORAGE_DATA: dict[str, Any] = { "name": None, "password": None, }, - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": None, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, "retention": { "copies": None, "days": None, @@ -276,7 +276,7 @@ async def test_delete( { "backup_id": "abc123", "failed_agent_ids": ["test.remote"], - "with_strategy_settings": False, + "with_automatic_settings": False, } ] }, @@ -487,7 +487,7 @@ async def test_generate_calls_create( "include_homeassistant": True, "name": None, "password": None, - "with_strategy_settings": True, + "with_automatic_settings": True, }, ), ( @@ -509,7 +509,7 @@ async def test_generate_calls_create( "include_homeassistant": True, "name": "test-name", "password": "test-password", - "with_strategy_settings": True, + "with_automatic_settings": True, }, ), ], @@ -522,7 +522,7 @@ async def test_generate_with_default_settings_calls_create( create_backup_settings: dict[str, Any], expected_call_params: dict[str, Any], ) -> None: - """Test backup/generate_with_strategy_settings calls async_initiate_backup.""" + """Test backup/generate_with_automatic_settings calls async_initiate_backup.""" await setup_backup_integration(hass, with_hassio=False) client = await hass_ws_client(hass) @@ -540,7 +540,7 @@ async def test_generate_with_default_settings_calls_create( 
return_value=NewBackup(backup_job_id="abc123"), ) as generate_backup: await client.send_json_auto_id( - {"type": "backup/generate_with_strategy_settings"} + {"type": "backup/generate_with_automatic_settings"} ) result = await client.receive_json() assert result["success"] @@ -780,8 +780,8 @@ async def test_agents_info( "password": "test-password", }, "retention": {"copies": 3, "days": 7}, - "last_attempted_strategy_backup": "2024-10-26T04:45:00+01:00", - "last_completed_strategy_backup": "2024-10-26T04:45:00+01:00", + "last_attempted_automatic_backup": "2024-10-26T04:45:00+01:00", + "last_completed_automatic_backup": "2024-10-26T04:45:00+01:00", "schedule": {"state": "daily"}, }, }, @@ -798,8 +798,8 @@ async def test_agents_info( "password": None, }, "retention": {"copies": 3, "days": None}, - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": None, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, "schedule": {"state": "never"}, }, }, @@ -816,8 +816,8 @@ async def test_agents_info( "password": None, }, "retention": {"copies": None, "days": 7}, - "last_attempted_strategy_backup": "2024-10-27T04:45:00+01:00", - "last_completed_strategy_backup": "2024-10-26T04:45:00+01:00", + "last_attempted_automatic_backup": "2024-10-27T04:45:00+01:00", + "last_completed_automatic_backup": "2024-10-26T04:45:00+01:00", "schedule": {"state": "never"}, }, }, @@ -834,8 +834,8 @@ async def test_agents_info( "password": None, }, "retention": {"copies": None, "days": None}, - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": None, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, "schedule": {"state": "mon"}, }, }, @@ -852,8 +852,8 @@ async def test_agents_info( "password": None, }, "retention": {"copies": None, "days": None}, - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": None, + "last_attempted_automatic_backup": None, + 
"last_completed_automatic_backup": None, "schedule": {"state": "sat"}, }, }, @@ -1022,7 +1022,7 @@ async def test_config_update_errors( @pytest.mark.parametrize( ( "command", - "last_completed_strategy_backup", + "last_completed_automatic_backup", "time_1", "time_2", "attempted_backup_time", @@ -1154,7 +1154,7 @@ async def test_config_schedule_logic( hass_storage: dict[str, Any], create_backup: AsyncMock, command: dict[str, Any], - last_completed_strategy_backup: str, + last_completed_automatic_backup: str, time_1: str, time_2: str, attempted_backup_time: str, @@ -1179,8 +1179,8 @@ async def test_config_schedule_logic( "password": "test-password", }, "retention": {"copies": None, "days": None}, - "last_attempted_strategy_backup": last_completed_strategy_backup, - "last_completed_strategy_backup": last_completed_strategy_backup, + "last_attempted_automatic_backup": last_completed_automatic_backup, + "last_completed_automatic_backup": last_completed_automatic_backup, "schedule": {"state": "daily"}, }, } @@ -1210,11 +1210,11 @@ async def test_config_schedule_logic( async_fire_time_changed(hass, fire_all=True) # flush out storage save await hass.async_block_till_done() assert ( - hass_storage[DOMAIN]["data"]["config"]["last_attempted_strategy_backup"] + hass_storage[DOMAIN]["data"]["config"]["last_attempted_automatic_backup"] == attempted_backup_time ) assert ( - hass_storage[DOMAIN]["data"]["config"]["last_completed_strategy_backup"] + hass_storage[DOMAIN]["data"]["config"]["last_completed_automatic_backup"] == completed_backup_time ) @@ -1251,22 +1251,22 @@ async def test_config_schedule_logic( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + 
with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1290,22 +1290,22 @@ async def test_config_schedule_logic( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1329,27 +1329,27 @@ async def test_config_schedule_logic( { "backup-1": MagicMock( date="2024-11-09T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-5": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1373,27 +1373,27 @@ async def test_config_schedule_logic( { "backup-1": MagicMock( date="2024-11-09T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( 
date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-5": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1417,22 +1417,22 @@ async def test_config_schedule_logic( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1456,22 +1456,22 @@ async def test_config_schedule_logic( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1495,27 +1495,27 @@ async def test_config_schedule_logic( { "backup-1": MagicMock( date="2024-11-09T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + 
with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-5": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1539,12 +1539,12 @@ async def test_config_schedule_logic( { "backup-1": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1595,8 +1595,8 @@ async def test_config_retention_copies_logic( "password": "test-password", }, "retention": {"copies": None, "days": None}, - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": last_backup_time, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": last_backup_time, "schedule": {"state": "daily"}, }, } @@ -1628,11 +1628,11 @@ async def test_config_retention_copies_logic( async_fire_time_changed(hass, fire_all=True) # flush out storage save await hass.async_block_till_done() assert ( - hass_storage[DOMAIN]["data"]["config"]["last_attempted_strategy_backup"] + hass_storage[DOMAIN]["data"]["config"]["last_attempted_automatic_backup"] == backup_time ) assert ( - hass_storage[DOMAIN]["data"]["config"]["last_completed_strategy_backup"] + hass_storage[DOMAIN]["data"]["config"]["last_completed_automatic_backup"] == backup_time ) @@ -1641,7 +1641,7 @@ async def test_config_retention_copies_logic( ("backup_command", "backup_time"), [ ( - {"type": "backup/generate_with_strategy_settings"}, + {"type": "backup/generate_with_automatic_settings"}, "2024-11-11T12:00:00+01:00", ), ( @@ 
-1672,22 +1672,22 @@ async def test_config_retention_copies_logic( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1708,22 +1708,22 @@ async def test_config_retention_copies_logic( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1744,27 +1744,27 @@ async def test_config_retention_copies_logic( { "backup-1": MagicMock( date="2024-11-09T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-5": MagicMock( date="2024-11-12T04:45:00+01:00", - 
with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1785,27 +1785,27 @@ async def test_config_retention_copies_logic( { "backup-1": MagicMock( date="2024-11-09T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-5": MagicMock( date="2024-11-12T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1852,8 +1852,8 @@ async def test_config_retention_copies_logic_manual_backup( "password": "test-password", }, "retention": {"copies": None, "days": None}, - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": None, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, "schedule": {"state": "daily"}, }, } @@ -1889,11 +1889,11 @@ async def test_config_retention_copies_logic_manual_backup( async_fire_time_changed(hass, fire_all=True) # flush out storage save await hass.async_block_till_done() assert ( - hass_storage[DOMAIN]["data"]["config"]["last_attempted_strategy_backup"] + hass_storage[DOMAIN]["data"]["config"]["last_attempted_automatic_backup"] == backup_time ) assert ( - hass_storage[DOMAIN]["data"]["config"]["last_completed_strategy_backup"] + hass_storage[DOMAIN]["data"]["config"]["last_completed_automatic_backup"] == backup_time ) @@ -1922,17 +1922,17 @@ async def test_config_retention_copies_logic_manual_backup( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, 
spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1955,17 +1955,17 @@ async def test_config_retention_copies_logic_manual_backup( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -1988,22 +1988,22 @@ async def test_config_retention_copies_logic_manual_backup( { "backup-1": MagicMock( date="2024-11-09T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -2026,17 +2026,17 @@ async def test_config_retention_copies_logic_manual_backup( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, 
spec=ManagerBackup, ), }, @@ -2059,17 +2059,17 @@ async def test_config_retention_copies_logic_manual_backup( { "backup-1": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -2092,22 +2092,22 @@ async def test_config_retention_copies_logic_manual_backup( { "backup-1": MagicMock( date="2024-11-09T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-2": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-3": MagicMock( date="2024-11-11T04:45:00+01:00", - with_strategy_settings=True, + with_automatic_settings=True, spec=ManagerBackup, ), "backup-4": MagicMock( date="2024-11-10T04:45:00+01:00", - with_strategy_settings=False, + with_automatic_settings=False, spec=ManagerBackup, ), }, @@ -2155,8 +2155,8 @@ async def test_config_retention_days_logic( "password": "test-password", }, "retention": {"copies": None, "days": None}, - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": last_backup_time, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": last_backup_time, "schedule": {"state": "never"}, }, } diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py index bd8e80e0666..57c801e0d68 100644 --- a/tests/components/cloud/test_backup.py +++ b/tests/components/cloud/test_backup.py @@ -171,7 +171,7 @@ async def test_agents_list_backups( "size": 34519040, "agent_ids": ["cloud.cloud"], "failed_agent_ids": [], - "with_strategy_settings": None, + "with_automatic_settings": None, } 
] @@ -195,8 +195,8 @@ async def test_agents_list_backups_fail_cloud( assert response["result"] == { "agent_errors": {"cloud.cloud": "Failed to list backups"}, "backups": [], - "last_attempted_strategy_backup": None, - "last_completed_strategy_backup": None, + "last_attempted_automatic_backup": None, + "last_completed_automatic_backup": None, } @@ -218,7 +218,7 @@ async def test_agents_list_backups_fail_cloud( "size": 34519040, "agent_ids": ["cloud.cloud"], "failed_agent_ids": [], - "with_strategy_settings": None, + "with_automatic_settings": None, }, ), ( diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index 45aa28c19d6..c342c006732 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -341,7 +341,7 @@ async def test_agent_info( "name": "Test", "protected": False, "size": 1048576, - "with_strategy_settings": None, + "with_automatic_settings": None, }, ), ( @@ -362,7 +362,7 @@ async def test_agent_info( "name": "Test", "protected": False, "size": 1048576, - "with_strategy_settings": None, + "with_automatic_settings": None, }, ), ], diff --git a/tests/components/kitchen_sink/test_backup.py b/tests/components/kitchen_sink/test_backup.py index 81876b5c3d1..25ae2e3a2f6 100644 --- a/tests/components/kitchen_sink/test_backup.py +++ b/tests/components/kitchen_sink/test_backup.py @@ -104,7 +104,7 @@ async def test_agents_list_backups( "name": "Kitchen sink syncer", "protected": False, "size": 1234, - "with_strategy_settings": None, + "with_automatic_settings": None, } ] @@ -183,7 +183,7 @@ async def test_agents_upload( "name": "Test", "protected": False, "size": 0.0, - "with_strategy_settings": False, + "with_automatic_settings": False, } From a6089b497a908ce6d0e18a92de5ef0cc5807457d Mon Sep 17 00:00:00 2001 From: Joakim Plate Date: Wed, 18 Dec 2024 18:03:27 +0100 Subject: [PATCH 430/677] =?UTF-8?q?Update=20fj=C3=A4r=C3=A5skupan=20to=202?= =?UTF-8?q?.3.2=20(#133499)?= MIME-Version: 
1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- homeassistant/components/fjaraskupan/light.py | 3 --- homeassistant/components/fjaraskupan/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 3 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/fjaraskupan/light.py b/homeassistant/components/fjaraskupan/light.py index b33904c805d..f0083591d4d 100644 --- a/homeassistant/components/fjaraskupan/light.py +++ b/homeassistant/components/fjaraskupan/light.py @@ -4,8 +4,6 @@ from __future__ import annotations from typing import Any -from fjaraskupan import COMMAND_LIGHT_ON_OFF - from homeassistant.components.light import ATTR_BRIGHTNESS, ColorMode, LightEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -62,7 +60,6 @@ class Light(CoordinatorEntity[FjaraskupanCoordinator], LightEntity): if self.is_on: async with self.coordinator.async_connect_and_update() as device: await device.send_dim(0) - await device.send_command(COMMAND_LIGHT_ON_OFF) @property def is_on(self) -> bool: diff --git a/homeassistant/components/fjaraskupan/manifest.json b/homeassistant/components/fjaraskupan/manifest.json index cc368b3e92f..2fd49aac5ee 100644 --- a/homeassistant/components/fjaraskupan/manifest.json +++ b/homeassistant/components/fjaraskupan/manifest.json @@ -14,5 +14,5 @@ "documentation": "https://www.home-assistant.io/integrations/fjaraskupan", "iot_class": "local_polling", "loggers": ["bleak", "fjaraskupan"], - "requirements": ["fjaraskupan==2.3.1"] + "requirements": ["fjaraskupan==2.3.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 47929f65916..af2457b8d88 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -915,7 +915,7 @@ fivem-api==0.1.2 fixerio==1.0.0a0 # homeassistant.components.fjaraskupan -fjaraskupan==2.3.1 +fjaraskupan==2.3.2 # homeassistant.components.flexit_bacnet flexit_bacnet==2.2.1 diff --git 
a/requirements_test_all.txt b/requirements_test_all.txt index 3b55231f898..f7f79ed6200 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -774,7 +774,7 @@ fitbit==0.3.1 fivem-api==0.1.2 # homeassistant.components.fjaraskupan -fjaraskupan==2.3.1 +fjaraskupan==2.3.2 # homeassistant.components.flexit_bacnet flexit_bacnet==2.2.1 From 920de9060398829a1d3d4f13cd817ea7392a6976 Mon Sep 17 00:00:00 2001 From: peteS-UK <64092177+peteS-UK@users.noreply.github.com> Date: Wed, 18 Dec 2024 17:22:22 +0000 Subject: [PATCH 431/677] Increase Squeezebox config_flow test coverage to 100% (#133484) Co-authored-by: Joost Lekkerkerker --- .../components/squeezebox/test_config_flow.py | 61 +++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/tests/components/squeezebox/test_config_flow.py b/tests/components/squeezebox/test_config_flow.py index 0a03bcc291c..f2c9636c470 100644 --- a/tests/components/squeezebox/test_config_flow.py +++ b/tests/components/squeezebox/test_config_flow.py @@ -166,6 +166,67 @@ async def test_form_invalid_auth(hass: HomeAssistant) -> None: assert result["errors"] == {"base": "invalid_auth"} +async def test_form_validate_exception(hass: HomeAssistant) -> None: + """Test we handle exception.""" + + with ( + patch( + "pysqueezebox.Server.async_query", + return_value={"uuid": UUID}, + ), + patch( + "homeassistant.components.squeezebox.async_setup_entry", + return_value=True, + ), + patch( + "homeassistant.components.squeezebox.config_flow.async_discover", + mock_discover, + ), + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "edit" + + with patch( + "homeassistant.components.squeezebox.config_flow.Server.async_query", + side_effect=Exception, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PORT: PORT, + CONF_USERNAME: "", 
+ CONF_PASSWORD: "", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unknown"} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: HOST, + CONF_PORT: PORT, + CONF_USERNAME: "", + CONF_PASSWORD: "", + CONF_HTTPS: False, + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == HOST + assert result["data"] == { + CONF_HOST: HOST, + CONF_PORT: PORT, + CONF_USERNAME: "", + CONF_PASSWORD: "", + CONF_HTTPS: False, + } + + async def test_form_cannot_connect(hass: HomeAssistant) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( From fc622e398f288cadecd8eccdfb6e57001ef1c8f5 Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Wed, 18 Dec 2024 18:24:12 +0100 Subject: [PATCH 432/677] add exception translation to enphase_envoy (#132483) --- .../components/enphase_envoy/__init__.py | 9 +++++++-- .../components/enphase_envoy/coordinator.py | 20 ++++++++++++++++--- .../enphase_envoy/quality_scale.yaml | 4 +--- .../components/enphase_envoy/strings.json | 11 ++++++++++ 4 files changed, 36 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/enphase_envoy/__init__.py b/homeassistant/components/enphase_envoy/__init__.py index db36cab1288..f4fe4aff2cb 100644 --- a/homeassistant/components/enphase_envoy/__init__.py +++ b/homeassistant/components/enphase_envoy/__init__.py @@ -51,8 +51,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: EnphaseConfigEntry) -> b # wait for the next discovery to find the device at its new address # and update the config entry so we do not mix up devices. 
raise ConfigEntryNotReady( - f"Unexpected device found at {host}; expected {entry.unique_id}, " - f"found {envoy.serial_number}" + translation_domain=DOMAIN, + translation_key="unexpected_device", + translation_placeholders={ + "host": host, + "expected_serial": str(entry.unique_id), + "actual_serial": str(envoy.serial_number), + }, ) entry.runtime_data = coordinator diff --git a/homeassistant/components/enphase_envoy/coordinator.py b/homeassistant/components/enphase_envoy/coordinator.py index 00bc7666f78..386661402de 100644 --- a/homeassistant/components/enphase_envoy/coordinator.py +++ b/homeassistant/components/enphase_envoy/coordinator.py @@ -18,7 +18,7 @@ from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed import homeassistant.util.dt as dt_util -from .const import INVALID_AUTH_ERRORS +from .const import DOMAIN, INVALID_AUTH_ERRORS SCAN_INTERVAL = timedelta(seconds=60) @@ -158,9 +158,23 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): # token likely expired or firmware changed, try to re-authenticate self._setup_complete = False continue - raise ConfigEntryAuthFailed from err + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="authentication_error", + translation_placeholders={ + "host": envoy.host, + "args": err.args[0], + }, + ) from err except EnvoyError as err: - raise UpdateFailed(f"Error communicating with API: {err}") from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="envoy_error", + translation_placeholders={ + "host": envoy.host, + "args": err.args[0], + }, + ) from err # if we have a firmware version from previous setup, compare to current one # when envoy gets new firmware there will be an authentication failure diff --git a/homeassistant/components/enphase_envoy/quality_scale.yaml b/homeassistant/components/enphase_envoy/quality_scale.yaml index 
c4077b8df67..4b83c2886f7 100644 --- a/homeassistant/components/enphase_envoy/quality_scale.yaml +++ b/homeassistant/components/enphase_envoy/quality_scale.yaml @@ -108,9 +108,7 @@ rules: entity-device-class: done entity-disabled-by-default: done entity-translations: done - exception-translations: - status: todo - comment: pending https://github.com/home-assistant/core/pull/132483 + exception-translations: done icon-translations: todo reconfiguration-flow: done repair-issues: diff --git a/homeassistant/components/enphase_envoy/strings.json b/homeassistant/components/enphase_envoy/strings.json index a338deb9638..a78d0bc032a 100644 --- a/homeassistant/components/enphase_envoy/strings.json +++ b/homeassistant/components/enphase_envoy/strings.json @@ -371,5 +371,16 @@ "name": "Grid enabled" } } + }, + "exceptions": { + "unexpected_device": { + "message": "Unexpected Envoy serial-number found at {host}; expected {expected_serial}, found {actual_serial}" + }, + "authentication_error": { + "message": "Envoy authentication failure on {host}: {args}" + }, + "envoy_error": { + "message": "Error communicating with Envoy API on {host}: {args}" + } } } From 51d63ba50872331a8a8aff2f86695ba37e897aca Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 18 Dec 2024 18:30:46 +0100 Subject: [PATCH 433/677] Store automatic backup flag in backup metadata (#133500) --- homeassistant/components/backup/manager.py | 52 +++++++++++++------ homeassistant/components/backup/models.py | 8 +++ homeassistant/components/backup/util.py | 1 + homeassistant/components/backup/websocket.py | 4 +- homeassistant/components/hassio/backup.py | 3 ++ .../components/kitchen_sink/backup.py | 1 + tests/components/backup/common.py | 5 +- .../backup/snapshots/test_backup.ambr | 2 +- .../backup/snapshots/test_websocket.ambr | 32 ++++++------ tests/components/backup/test_manager.py | 51 ++++++++++-------- tests/components/backup/test_websocket.py | 2 +- tests/components/cloud/test_backup.py | 5 ++ 
tests/components/hassio/test_backup.py | 8 ++- tests/components/kitchen_sink/test_backup.py | 5 ++ 14 files changed, 120 insertions(+), 59 deletions(-) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index e2c4f91730f..99373b1942a 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -23,7 +23,7 @@ from homeassistant.backup_restore import RESTORE_BACKUP_FILE, password_to_key from homeassistant.const import __version__ as HAVERSION from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import integration_platform +from homeassistant.helpers import instance_id, integration_platform from homeassistant.helpers.json import json_bytes from homeassistant.util import dt as dt_util @@ -200,6 +200,7 @@ class BackupReaderWriter(abc.ABC): *, agent_ids: list[str], backup_name: str, + extra_metadata: dict[str, bool | str], include_addons: list[str] | None, include_all_addons: bool, include_database: bool, @@ -445,16 +446,18 @@ class BackupManager: if (backup_id := agent_backup.backup_id) not in backups: if known_backup := self.known_backups.get(backup_id): failed_agent_ids = known_backup.failed_agent_ids - with_automatic_settings = known_backup.with_automatic_settings else: failed_agent_ids = [] - with_automatic_settings = None + with_automatic_settings = self.is_our_automatic_backup( + agent_backup, await instance_id.async_get(self.hass) + ) backups[backup_id] = ManagerBackup( agent_ids=[], addons=agent_backup.addons, backup_id=backup_id, date=agent_backup.date, database_included=agent_backup.database_included, + extra_metadata=agent_backup.extra_metadata, failed_agent_ids=failed_agent_ids, folders=agent_backup.folders, homeassistant_included=agent_backup.homeassistant_included, @@ -494,16 +497,18 @@ class BackupManager: if backup is None: if known_backup := 
self.known_backups.get(backup_id): failed_agent_ids = known_backup.failed_agent_ids - with_automatic_settings = known_backup.with_automatic_settings else: failed_agent_ids = [] - with_automatic_settings = None + with_automatic_settings = self.is_our_automatic_backup( + result, await instance_id.async_get(self.hass) + ) backup = ManagerBackup( agent_ids=[], addons=result.addons, backup_id=result.backup_id, date=result.date, database_included=result.database_included, + extra_metadata=result.extra_metadata, failed_agent_ids=failed_agent_ids, folders=result.folders, homeassistant_included=result.homeassistant_included, @@ -517,6 +522,22 @@ class BackupManager: return (backup, agent_errors) + @staticmethod + def is_our_automatic_backup( + backup: AgentBackup, our_instance_id: str + ) -> bool | None: + """Check if a backup was created by us and return automatic_settings flag. + + Returns `None` if the backup was not created by us, or if the + automatic_settings flag is not a boolean. + """ + if backup.extra_metadata.get("instance_id") != our_instance_id: + return None + with_automatic_settings = backup.extra_metadata.get("with_automatic_settings") + if not isinstance(with_automatic_settings, bool): + return None + return with_automatic_settings + async def async_delete_backup(self, backup_id: str) -> dict[str, Exception]: """Delete a backup.""" agent_errors: dict[str, Exception] = {} @@ -598,7 +619,7 @@ class BackupManager: open_stream=written_backup.open_stream, ) await written_backup.release_stream() - self.known_backups.add(written_backup.backup, agent_errors, False) + self.known_backups.add(written_backup.backup, agent_errors) async def async_create_backup( self, @@ -699,6 +720,10 @@ class BackupManager: new_backup, self._backup_task = await self._reader_writer.async_create_backup( agent_ids=agent_ids, backup_name=backup_name, + extra_metadata={ + "instance_id": await instance_id.async_get(self.hass), + "with_automatic_settings": with_automatic_settings, + }, 
include_addons=include_addons, include_all_addons=include_all_addons, include_database=include_database, @@ -747,9 +772,7 @@ class BackupManager: # create backup was successful, update last_completed_automatic_backup self.config.data.last_completed_automatic_backup = dt_util.now() self.store.save() - self.known_backups.add( - written_backup.backup, agent_errors, with_automatic_settings - ) + self.known_backups.add(written_backup.backup, agent_errors) # delete old backups more numerous than copies await delete_backups_exceeding_configured_count(self) @@ -870,7 +893,6 @@ class KnownBackups: backup["backup_id"]: KnownBackup( backup_id=backup["backup_id"], failed_agent_ids=backup["failed_agent_ids"], - with_automatic_settings=backup["with_automatic_settings"], ) for backup in stored_backups } @@ -883,13 +905,11 @@ class KnownBackups: self, backup: AgentBackup, agent_errors: dict[str, Exception], - with_automatic_settings: bool, ) -> None: """Add a backup.""" self._backups[backup.backup_id] = KnownBackup( backup_id=backup.backup_id, failed_agent_ids=list(agent_errors), - with_automatic_settings=with_automatic_settings, ) self._manager.store.save() @@ -911,14 +931,12 @@ class KnownBackup: backup_id: str failed_agent_ids: list[str] - with_automatic_settings: bool def to_dict(self) -> StoredKnownBackup: """Convert known backup to a dict.""" return { "backup_id": self.backup_id, "failed_agent_ids": self.failed_agent_ids, - "with_automatic_settings": self.with_automatic_settings, } @@ -927,7 +945,6 @@ class StoredKnownBackup(TypedDict): backup_id: str failed_agent_ids: list[str] - with_automatic_settings: bool class CoreBackupReaderWriter(BackupReaderWriter): @@ -945,6 +962,7 @@ class CoreBackupReaderWriter(BackupReaderWriter): *, agent_ids: list[str], backup_name: str, + extra_metadata: dict[str, bool | str], include_addons: list[str] | None, include_all_addons: bool, include_database: bool, @@ -969,6 +987,7 @@ class CoreBackupReaderWriter(BackupReaderWriter): 
agent_ids=agent_ids, backup_id=backup_id, backup_name=backup_name, + extra_metadata=extra_metadata, include_database=include_database, date_str=date_str, on_progress=on_progress, @@ -987,6 +1006,7 @@ class CoreBackupReaderWriter(BackupReaderWriter): backup_id: str, backup_name: str, date_str: str, + extra_metadata: dict[str, bool | str], include_database: bool, on_progress: Callable[[ManagerStateEvent], None], password: str | None, @@ -1012,6 +1032,7 @@ class CoreBackupReaderWriter(BackupReaderWriter): backup_data = { "compressed": True, "date": date_str, + "extra": extra_metadata, "homeassistant": { "exclude_database": not include_database, "version": HAVERSION, @@ -1035,6 +1056,7 @@ class CoreBackupReaderWriter(BackupReaderWriter): backup_id=backup_id, database_included=include_database, date=date_str, + extra_metadata=extra_metadata, folders=[], homeassistant_included=True, homeassistant_version=HAVERSION, diff --git a/homeassistant/components/backup/models.py b/homeassistant/components/backup/models.py index 6306d9f1fec..a937933f04c 100644 --- a/homeassistant/components/backup/models.py +++ b/homeassistant/components/backup/models.py @@ -33,6 +33,7 @@ class AgentBackup: backup_id: str date: str database_included: bool + extra_metadata: dict[str, bool | str] folders: list[Folder] homeassistant_included: bool homeassistant_version: str | None # None if homeassistant_included is False @@ -44,6 +45,12 @@ class AgentBackup: """Return a dict representation of this backup.""" return asdict(self) + def as_frontend_json(self) -> dict: + """Return a dict representation of this backup for sending to frontend.""" + return { + key: val for key, val in asdict(self).items() if key != "extra_metadata" + } + @classmethod def from_dict(cls, data: dict[str, Any]) -> Self: """Create an instance from a JSON serialization.""" @@ -52,6 +59,7 @@ class AgentBackup: backup_id=data["backup_id"], date=data["date"], database_included=data["database_included"], + 
extra_metadata=data["extra_metadata"], folders=[Folder(folder) for folder in data["folders"]], homeassistant_included=data["homeassistant_included"], homeassistant_version=data["homeassistant_version"], diff --git a/homeassistant/components/backup/util.py b/homeassistant/components/backup/util.py index 1d8252cc30b..bb01a9a4e3f 100644 --- a/homeassistant/components/backup/util.py +++ b/homeassistant/components/backup/util.py @@ -60,6 +60,7 @@ def read_backup(backup_path: Path) -> AgentBackup: backup_id=cast(str, data["slug"]), database_included=database_included, date=cast(str, data["date"]), + extra_metadata=cast(dict[str, bool | str], data.get("metadata", {})), folders=folders, homeassistant_included=homeassistant_included, homeassistant_version=homeassistant_version, diff --git a/homeassistant/components/backup/websocket.py b/homeassistant/components/backup/websocket.py index abe3d372be5..2fee84e39bb 100644 --- a/homeassistant/components/backup/websocket.py +++ b/homeassistant/components/backup/websocket.py @@ -51,7 +51,7 @@ async def handle_info( "agent_errors": { agent_id: str(err) for agent_id, err in agent_errors.items() }, - "backups": list(backups.values()), + "backups": [backup.as_frontend_json() for backup in backups.values()], "last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup, "last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup, }, @@ -81,7 +81,7 @@ async def handle_details( "agent_errors": { agent_id: str(err) for agent_id, err in agent_errors.items() }, - "backup": backup, + "backup": backup.as_frontend_json() if backup else None, }, ) diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py index 4bc6dff44d2..1b7cf930588 100644 --- a/homeassistant/components/hassio/backup.py +++ b/homeassistant/components/hassio/backup.py @@ -104,6 +104,7 @@ def _backup_details_to_agent_backup( backup_id=details.slug, database_included=database_included, 
date=details.date.isoformat(), + extra_metadata=details.extra or {}, folders=[Folder(folder) for folder in details.folders], homeassistant_included=homeassistant_included, homeassistant_version=details.homeassistant, @@ -202,6 +203,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): *, agent_ids: list[str], backup_name: str, + extra_metadata: dict[str, bool | str], include_addons: list[str] | None, include_all_addons: bool, include_database: bool, @@ -242,6 +244,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): location=locations or LOCATION_CLOUD_BACKUP, homeassistant_exclude_database=not include_database, background=True, + extra=extra_metadata, ) ) backup_task = self._hass.async_create_task( diff --git a/homeassistant/components/kitchen_sink/backup.py b/homeassistant/components/kitchen_sink/backup.py index 615364f55ee..c4a045aeefc 100644 --- a/homeassistant/components/kitchen_sink/backup.py +++ b/homeassistant/components/kitchen_sink/backup.py @@ -58,6 +58,7 @@ class KitchenSinkBackupAgent(BackupAgent): backup_id="abc123", database_included=False, date="1970-01-01T00:00:00Z", + extra_metadata={}, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", diff --git a/tests/components/backup/common.py b/tests/components/backup/common.py index b06b8a5ef5d..ffecd1c4186 100644 --- a/tests/components/backup/common.py +++ b/tests/components/backup/common.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import AsyncIterator, Callable, Coroutine from pathlib import Path from typing import Any -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import ANY, AsyncMock, Mock, patch from homeassistant.components.backup import ( DOMAIN, @@ -29,6 +29,7 @@ TEST_BACKUP_ABC123 = AgentBackup( backup_id="abc123", database_included=True, date="1970-01-01T00:00:00.000Z", + extra_metadata={"instance_id": ANY, "with_automatic_settings": True}, folders=[Folder.MEDIA, Folder.SHARE], 
homeassistant_included=True, homeassistant_version="2024.12.0", @@ -43,6 +44,7 @@ TEST_BACKUP_DEF456 = AgentBackup( backup_id="def456", database_included=False, date="1980-01-01T00:00:00.000Z", + extra_metadata={"instance_id": "unknown_uuid", "with_automatic_settings": True}, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", @@ -69,6 +71,7 @@ class BackupAgentTest(BackupAgent): backup_id="abc123", database_included=True, date="1970-01-01T00:00:00Z", + extra_metadata={}, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", diff --git a/tests/components/backup/snapshots/test_backup.ambr b/tests/components/backup/snapshots/test_backup.ambr index 8cbf34895f9..f21de9d9fad 100644 --- a/tests/components/backup/snapshots/test_backup.ambr +++ b/tests/components/backup/snapshots/test_backup.ambr @@ -78,7 +78,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index 58a5162b1bf..1607e2e15d9 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -1539,7 +1539,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, @@ -1607,7 +1607,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, @@ -1660,7 +1660,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, @@ -1788,7 +1788,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 
'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, @@ -1841,7 +1841,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, @@ -1950,7 +1950,7 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_automatic_settings': False, + 'with_automatic_settings': None, }), ]), 'last_attempted_automatic_backup': None, @@ -2163,7 +2163,7 @@ 'name': 'Test', 'protected': False, 'size': 13, - 'with_automatic_settings': False, + 'with_automatic_settings': None, }), ]), 'last_attempted_automatic_backup': None, @@ -2216,7 +2216,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), }), 'success': True, @@ -2254,7 +2254,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), }), 'success': True, @@ -2305,7 +2305,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), }), 'success': True, @@ -2344,7 +2344,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), }), 'success': True, @@ -2607,7 +2607,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, @@ -2649,7 +2649,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, @@ -2692,7 +2692,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, @@ -2756,7 +2756,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 
'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, @@ -2799,7 +2799,7 @@ 'name': 'Test', 'protected': False, 'size': 0, - 'with_automatic_settings': None, + 'with_automatic_settings': True, }), ]), 'last_attempted_automatic_backup': None, diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index a9b4674ad96..5795309501d 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -121,6 +121,10 @@ async def test_async_create_backup( assert create_backup.call_args == call( agent_ids=["backup.local"], backup_name="Core 2025.1.0", + extra_metadata={ + "instance_id": hass.data["core.uuid"], + "with_automatic_settings": False, + }, include_addons=None, include_all_addons=False, include_database=True, @@ -325,6 +329,10 @@ async def test_async_initiate_backup( assert backup_json_dict == { "compressed": True, "date": ANY, + "extra": { + "instance_id": hass.data["core.uuid"], + "with_automatic_settings": False, + }, "homeassistant": { "exclude_database": not include_database, "version": "2025.1.0", @@ -345,30 +353,30 @@ async def test_async_initiate_backup( backup_agent_ids = backup_data.pop("agent_ids") assert backup_agent_ids == agent_ids + assert backup_data == { + "addons": [], + "backup_id": ANY, + "database_included": include_database, + "date": ANY, + "failed_agent_ids": [], + "folders": [], + "homeassistant_included": True, + "homeassistant_version": "2025.1.0", + "name": name, + "protected": bool(password), + "size": ANY, + "with_automatic_settings": False, + } - backup = AgentBackup.from_dict(backup_data) - - assert backup == AgentBackup( - addons=[], - backup_id=ANY, - database_included=include_database, - date=ANY, - folders=[], - homeassistant_included=True, - homeassistant_version="2025.1.0", - name=name, - protected=bool(password), - size=ANY, - ) for agent_id in agent_ids: agent = agents[agent_id] assert 
len(agent._backups) == 1 - agent_backup = agent._backups[backup.backup_id] - assert agent_backup.backup_id == backup.backup_id - assert agent_backup.date == backup.date - assert agent_backup.name == backup.name - assert agent_backup.protected == backup.protected - assert agent_backup.size == backup.size + agent_backup = agent._backups[backup_data["backup_id"]] + assert agent_backup.backup_id == backup_data["backup_id"] + assert agent_backup.date == backup_data["date"] + assert agent_backup.name == backup_data["name"] + assert agent_backup.protected == backup_data["protected"] + assert agent_backup.size == backup_data["size"] outer_tar = mocked_tarfile.return_value core_tar = outer_tar.create_inner_tar.return_value.__enter__.return_value @@ -380,7 +388,7 @@ async def test_async_initiate_backup( tar_file_path = str(mocked_tarfile.call_args_list[0][0][0]) backup_directory = hass.config.path(backup_directory) - assert tar_file_path == f"{backup_directory}/{backup.backup_id}.tar" + assert tar_file_path == f"{backup_directory}/{backup_data["backup_id"]}.tar" @pytest.mark.usefixtures("mock_backup_generation") @@ -522,7 +530,6 @@ async def test_async_initiate_backup_with_agent_error( { "backup_id": "abc123", "failed_agent_ids": ["test.remote"], - "with_automatic_settings": False, } ] diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index 1a0e2cc1a81..a0860f49149 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -34,6 +34,7 @@ from tests.typing import WebSocketGenerator BACKUP_CALL = call( agent_ids=["test.test-agent"], backup_name="test-name", + extra_metadata={"instance_id": ANY, "with_automatic_settings": True}, include_addons=["test-addon"], include_all_addons=False, include_database=True, @@ -276,7 +277,6 @@ async def test_delete( { "backup_id": "abc123", "failed_agent_ids": ["test.remote"], - "with_automatic_settings": False, } ] }, diff --git 
a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py index 57c801e0d68..93747ca25f7 100644 --- a/tests/components/cloud/test_backup.py +++ b/tests/components/cloud/test_backup.py @@ -108,6 +108,7 @@ def mock_list_files() -> Generator[MagicMock]: "backup_id": "23e64aec", "date": "2024-11-22T11:48:48.727189+01:00", "database_included": True, + "extra_metadata": {}, "folders": [], "homeassistant_included": True, "homeassistant_version": "2024.12.0.dev0", @@ -335,6 +336,7 @@ async def test_agents_upload( backup_id=backup_id, database_included=True, date="1970-01-01T00:00:00.000Z", + extra_metadata={}, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", @@ -390,6 +392,7 @@ async def test_agents_upload_fail_put( backup_id=backup_id, database_included=True, date="1970-01-01T00:00:00.000Z", + extra_metadata={}, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", @@ -438,6 +441,7 @@ async def test_agents_upload_fail_cloud( backup_id=backup_id, database_included=True, date="1970-01-01T00:00:00.000Z", + extra_metadata={}, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", @@ -479,6 +483,7 @@ async def test_agents_upload_not_protected( backup_id=backup_id, database_included=True, date="1970-01-01T00:00:00.000Z", + extra_metadata={}, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index c342c006732..9338313c87d 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -12,7 +12,7 @@ from datetime import datetime from io import StringIO import os from typing import Any -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import ANY, AsyncMock, Mock, patch from aiohasupervisor.exceptions import ( 
SupervisorBadRequestError, @@ -445,6 +445,7 @@ async def test_agent_upload( backup_id=backup_id, database_included=True, date="1970-01-01T00:00:00.000Z", + extra_metadata={}, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", @@ -622,6 +623,10 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( addons=None, background=True, compressed=True, + extra={ + "instance_id": ANY, + "with_automatic_settings": False, + }, folders=None, homeassistant_exclude_database=False, homeassistant=True, @@ -876,6 +881,7 @@ async def test_agent_receive_remote_backup( backup_id=backup_id, database_included=True, date="1970-01-01T00:00:00.000Z", + extra_metadata={}, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", diff --git a/tests/components/kitchen_sink/test_backup.py b/tests/components/kitchen_sink/test_backup.py index 25ae2e3a2f6..9e46845e1cb 100644 --- a/tests/components/kitchen_sink/test_backup.py +++ b/tests/components/kitchen_sink/test_backup.py @@ -14,6 +14,7 @@ from homeassistant.components.backup import ( ) from homeassistant.components.kitchen_sink import DOMAIN from homeassistant.core import HomeAssistant +from homeassistant.helpers import instance_id from homeassistant.setup import async_setup_component from tests.typing import ClientSessionGenerator, WebSocketGenerator @@ -137,6 +138,10 @@ async def test_agents_upload( backup_id=backup_id, database_included=True, date="1970-01-01T00:00:00.000Z", + extra_metadata={ + "instance_id": await instance_id.async_get(hass), + "with_automatic_settings": False, + }, folders=[Folder.MEDIA, Folder.SHARE], homeassistant_included=True, homeassistant_version="2024.12.0", From bb2d027532a0b481abf4f3b0536bb8c0d199cafe Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 18 Dec 2024 19:11:13 +0100 Subject: [PATCH 434/677] Add Peblar Rocksolid EV Chargers integration (#133501) * Add Peblar Rocksolid EV Chargers 
integration * Process review comments --- .strict-typing | 1 + CODEOWNERS | 2 + homeassistant/components/peblar/__init__.py | 54 ++++++++ .../components/peblar/config_flow.py | 71 +++++++++++ homeassistant/components/peblar/const.py | 10 ++ .../components/peblar/coordinator.py | 37 ++++++ homeassistant/components/peblar/entity.py | 26 ++++ homeassistant/components/peblar/manifest.json | 11 ++ .../components/peblar/quality_scale.yaml | 79 ++++++++++++ homeassistant/components/peblar/sensor.py | 73 +++++++++++ homeassistant/components/peblar/strings.json | 25 ++++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + mypy.ini | 10 ++ requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/peblar/__init__.py | 1 + tests/components/peblar/conftest.py | 48 ++++++++ .../peblar/fixtures/system_information.json | 57 +++++++++ tests/components/peblar/test_config_flow.py | 115 ++++++++++++++++++ 20 files changed, 633 insertions(+) create mode 100644 homeassistant/components/peblar/__init__.py create mode 100644 homeassistant/components/peblar/config_flow.py create mode 100644 homeassistant/components/peblar/const.py create mode 100644 homeassistant/components/peblar/coordinator.py create mode 100644 homeassistant/components/peblar/entity.py create mode 100644 homeassistant/components/peblar/manifest.json create mode 100644 homeassistant/components/peblar/quality_scale.yaml create mode 100644 homeassistant/components/peblar/sensor.py create mode 100644 homeassistant/components/peblar/strings.json create mode 100644 tests/components/peblar/__init__.py create mode 100644 tests/components/peblar/conftest.py create mode 100644 tests/components/peblar/fixtures/system_information.json create mode 100644 tests/components/peblar/test_config_flow.py diff --git a/.strict-typing b/.strict-typing index 899b22af35f..a96597da4c6 100644 --- a/.strict-typing +++ b/.strict-typing @@ -363,6 +363,7 @@ homeassistant.components.otbr.* 
homeassistant.components.overkiz.* homeassistant.components.p1_monitor.* homeassistant.components.panel_custom.* +homeassistant.components.peblar.* homeassistant.components.peco.* homeassistant.components.persistent_notification.* homeassistant.components.pi_hole.* diff --git a/CODEOWNERS b/CODEOWNERS index 8effcc49336..382fbffecaa 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1113,6 +1113,8 @@ build.json @home-assistant/supervisor /tests/components/palazzetti/ @dotvav /homeassistant/components/panel_custom/ @home-assistant/frontend /tests/components/panel_custom/ @home-assistant/frontend +/homeassistant/components/peblar/ @frenck +/tests/components/peblar/ @frenck /homeassistant/components/peco/ @IceBotYT /tests/components/peco/ @IceBotYT /homeassistant/components/pegel_online/ @mib1185 diff --git a/homeassistant/components/peblar/__init__.py b/homeassistant/components/peblar/__init__.py new file mode 100644 index 00000000000..559b124c772 --- /dev/null +++ b/homeassistant/components/peblar/__init__.py @@ -0,0 +1,54 @@ +"""Integration for Peblar EV chargers.""" + +from __future__ import annotations + +from aiohttp import CookieJar +from peblar import ( + AccessMode, + Peblar, + PeblarAuthenticationError, + PeblarConnectionError, + PeblarError, +) + +from homeassistant.const import CONF_HOST, CONF_PASSWORD, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady +from homeassistant.helpers.aiohttp_client import async_create_clientsession + +from .coordinator import PeblarConfigEntry, PeblarMeterDataUpdateCoordinator + +PLATFORMS = [Platform.SENSOR] + + +async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bool: + """Set up Peblar from a config entry.""" + + peblar = Peblar( + host=entry.data[CONF_HOST], + session=async_create_clientsession(hass, cookie_jar=CookieJar(unsafe=True)), + ) + try: + await peblar.login(password=entry.data[CONF_PASSWORD]) + api = await 
peblar.rest_api(enable=True, access_mode=AccessMode.READ_WRITE) + except PeblarConnectionError as err: + raise ConfigEntryNotReady("Could not connect to Peblar charger") from err + except PeblarAuthenticationError as err: + raise ConfigEntryError("Could not login to Peblar charger") from err + except PeblarError as err: + raise ConfigEntryNotReady( + "Unknown error occurred while connecting to Peblar charger" + ) from err + + coordinator = PeblarMeterDataUpdateCoordinator(hass, entry, api) + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bool: + """Unload Peblar config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/peblar/config_flow.py b/homeassistant/components/peblar/config_flow.py new file mode 100644 index 00000000000..056d4a68be6 --- /dev/null +++ b/homeassistant/components/peblar/config_flow.py @@ -0,0 +1,71 @@ +"""Config flow to configure the Peblar integration.""" + +from __future__ import annotations + +from typing import Any + +from aiohttp import CookieJar +from peblar import Peblar, PeblarAuthenticationError, PeblarConnectionError +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_HOST, CONF_PASSWORD +from homeassistant.helpers.aiohttp_client import async_create_clientsession +from homeassistant.helpers.selector import ( + TextSelector, + TextSelectorConfig, + TextSelectorType, +) + +from .const import DOMAIN, LOGGER + + +class PeblarFlowHandler(ConfigFlow, domain=DOMAIN): + """Handle a Peblar config flow.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initiated by the user.""" + errors 
= {} + + if user_input is not None: + peblar = Peblar( + host=user_input[CONF_HOST], + session=async_create_clientsession( + self.hass, cookie_jar=CookieJar(unsafe=True) + ), + ) + try: + await peblar.login(password=user_input[CONF_PASSWORD]) + info = await peblar.system_information() + except PeblarAuthenticationError: + errors[CONF_PASSWORD] = "invalid_auth" + except PeblarConnectionError: + errors[CONF_HOST] = "cannot_connect" + except Exception: # noqa: BLE001 + LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + await self.async_set_unique_id(info.product_serial_number) + self._abort_if_unique_id_configured() + return self.async_create_entry(title="Peblar", data=user_input) + else: + user_input = {} + + return self.async_show_form( + step_id="user", + data_schema=vol.Schema( + { + vol.Required( + CONF_HOST, default=user_input.get(CONF_HOST) + ): TextSelector(TextSelectorConfig(autocomplete="off")), + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig(type=TextSelectorType.PASSWORD) + ), + } + ), + errors=errors, + ) diff --git a/homeassistant/components/peblar/const.py b/homeassistant/components/peblar/const.py new file mode 100644 index 00000000000..b986c866d16 --- /dev/null +++ b/homeassistant/components/peblar/const.py @@ -0,0 +1,10 @@ +"""Constants for the Peblar integration.""" + +from __future__ import annotations + +import logging +from typing import Final + +DOMAIN: Final = "peblar" + +LOGGER = logging.getLogger(__package__) diff --git a/homeassistant/components/peblar/coordinator.py b/homeassistant/components/peblar/coordinator.py new file mode 100644 index 00000000000..8270905648f --- /dev/null +++ b/homeassistant/components/peblar/coordinator.py @@ -0,0 +1,37 @@ +"""Data update coordinator for Peblar EV chargers.""" + +from datetime import timedelta + +from peblar import PeblarApi, PeblarError, PeblarMeter + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant 
+from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import LOGGER + +type PeblarConfigEntry = ConfigEntry[PeblarMeterDataUpdateCoordinator] + + +class PeblarMeterDataUpdateCoordinator(DataUpdateCoordinator[PeblarMeter]): + """Class to manage fetching Peblar meter data.""" + + def __init__( + self, hass: HomeAssistant, entry: PeblarConfigEntry, api: PeblarApi + ) -> None: + """Initialize the coordinator.""" + self.api = api + super().__init__( + hass, + LOGGER, + config_entry=entry, + name=f"Peblar {entry.title} meter", + update_interval=timedelta(seconds=10), + ) + + async def _async_update_data(self) -> PeblarMeter: + """Fetch data from the Peblar device.""" + try: + return await self.api.meter() + except PeblarError as err: + raise UpdateFailed(err) from err diff --git a/homeassistant/components/peblar/entity.py b/homeassistant/components/peblar/entity.py new file mode 100644 index 00000000000..6951cf6c21f --- /dev/null +++ b/homeassistant/components/peblar/entity.py @@ -0,0 +1,26 @@ +"""Base entity for the Peblar integration.""" + +from __future__ import annotations + +from homeassistant.const import CONF_HOST +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import PeblarConfigEntry, PeblarMeterDataUpdateCoordinator + + +class PeblarEntity(CoordinatorEntity[PeblarMeterDataUpdateCoordinator]): + """Defines a Peblar entity.""" + + _attr_has_entity_name = True + + def __init__(self, entry: PeblarConfigEntry) -> None: + """Initialize the Peblar entity.""" + super().__init__(coordinator=entry.runtime_data) + self._attr_device_info = DeviceInfo( + configuration_url=f"http://{entry.data[CONF_HOST]}", + identifiers={(DOMAIN, str(entry.unique_id))}, + manufacturer="Peblar", + name="Peblar EV charger", + ) diff --git a/homeassistant/components/peblar/manifest.json 
b/homeassistant/components/peblar/manifest.json new file mode 100644 index 00000000000..6de605c95dc --- /dev/null +++ b/homeassistant/components/peblar/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "peblar", + "name": "Peblar", + "codeowners": ["@frenck"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/peblar", + "integration_type": "device", + "iot_class": "local_polling", + "quality_scale": "bronze", + "requirements": ["peblar==0.2.1"] +} diff --git a/homeassistant/components/peblar/quality_scale.yaml b/homeassistant/components/peblar/quality_scale.yaml new file mode 100644 index 00000000000..51bd60cc4b4 --- /dev/null +++ b/homeassistant/components/peblar/quality_scale.yaml @@ -0,0 +1,79 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: Integration does not register custom actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not have any custom actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. 
+ entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: todo + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: todo + test-coverage: todo + # Gold + devices: todo + diagnostics: todo + discovery-update-info: todo + discovery: todo + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration connects to a single device. + entity-category: todo + entity-device-class: todo + entity-disabled-by-default: todo + entity-translations: todo + exception-translations: + status: exempt + comment: | + The coordinator needs translation when the update failed. + icon-translations: todo + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration does not raise any repairable issues. + stale-devices: + status: exempt + comment: | + This integration connects to a single device. 
+ + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/peblar/sensor.py b/homeassistant/components/peblar/sensor.py new file mode 100644 index 00000000000..eafca23e125 --- /dev/null +++ b/homeassistant/components/peblar/sensor.py @@ -0,0 +1,73 @@ +"""Support for Peblar sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from peblar import PeblarMeter + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import UnitOfEnergy +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import PeblarConfigEntry +from .entity import PeblarEntity + + +@dataclass(frozen=True, kw_only=True) +class PeblarSensorDescription(SensorEntityDescription): + """Describe an Peblar sensor.""" + + value_fn: Callable[[PeblarMeter], int | None] + + +SENSORS: tuple[PeblarSensorDescription, ...] 
= ( + PeblarSensorDescription( + key="energy_total", + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + state_class=SensorStateClass.TOTAL_INCREASING, + suggested_display_precision=2, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_fn=lambda x: x.energy_total, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PeblarConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Peblar sensors based on a config entry.""" + async_add_entities( + PeblarSensorEntity(entry, description) for description in SENSORS + ) + + +class PeblarSensorEntity(PeblarEntity, SensorEntity): + """Defines a Peblar sensor.""" + + entity_description: PeblarSensorDescription + + def __init__( + self, + entry: PeblarConfigEntry, + description: PeblarSensorDescription, + ) -> None: + """Initialize the Peblar entity.""" + super().__init__(entry) + self.entity_description = description + self._attr_unique_id = f"{entry.unique_id}_{description.key}" + + @property + def native_value(self) -> int | None: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json new file mode 100644 index 00000000000..9bf4803b592 --- /dev/null +++ b/homeassistant/components/peblar/strings.json @@ -0,0 +1,25 @@ +{ + "config": { + "step": { + "user": { + "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need to get the IP address of your Peblar charger and the password you use to log into the Peblar device' web interface.\n\nHome Assistant will automatically configure your Peblar charger for use with Home Assistant.", + "data": { + "host": "[%key:common::config_flow::data::host%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "The hostname or IP address of 
your Peblar charger on your home network.", + "password": "The same password as you use to log in to the Peblar device' local web interface." + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 8e88e8a2ae8..599cc43c08b 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -452,6 +452,7 @@ FLOWS = { "p1_monitor", "palazzetti", "panasonic_viera", + "peblar", "peco", "pegel_online", "permobil", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index bd3c9eb04f9..48fedd9c127 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -4618,6 +4618,12 @@ "integration_type": "virtual", "supported_by": "upb" }, + "peblar": { + "name": "Peblar", + "integration_type": "device", + "config_flow": true, + "iot_class": "local_polling" + }, "peco": { "name": "PECO Outage Counter", "integration_type": "hub", diff --git a/mypy.ini b/mypy.ini index 15b96e0a802..ca7195ef92f 100644 --- a/mypy.ini +++ b/mypy.ini @@ -3386,6 +3386,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.peblar.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.peco.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index 
af2457b8d88..1b1938b2e4f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1599,6 +1599,9 @@ panasonic-viera==0.4.2 # homeassistant.components.dunehd pdunehd==1.3.2 +# homeassistant.components.peblar +peblar==0.2.1 + # homeassistant.components.peco peco==0.0.30 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f7f79ed6200..93a7979600d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1326,6 +1326,9 @@ panasonic-viera==0.4.2 # homeassistant.components.dunehd pdunehd==1.3.2 +# homeassistant.components.peblar +peblar==0.2.1 + # homeassistant.components.peco peco==0.0.30 diff --git a/tests/components/peblar/__init__.py b/tests/components/peblar/__init__.py new file mode 100644 index 00000000000..9180d51e98b --- /dev/null +++ b/tests/components/peblar/__init__.py @@ -0,0 +1 @@ +"""Integration tests for the Peblar integration.""" diff --git a/tests/components/peblar/conftest.py b/tests/components/peblar/conftest.py new file mode 100644 index 00000000000..dfe6aabc6bc --- /dev/null +++ b/tests/components/peblar/conftest.py @@ -0,0 +1,48 @@ +"""Fixtures for the Peblar integration tests.""" + +from __future__ import annotations + +from collections.abc import Generator +from unittest.mock import MagicMock, patch + +from peblar.models import PeblarSystemInformation +import pytest + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.const import CONF_HOST, CONF_PASSWORD + +from tests.common import MockConfigEntry, load_fixture + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title="Peblar", + domain=DOMAIN, + data={ + CONF_HOST: "127.0.0.127", + CONF_PASSWORD: "OMGSPIDERS", + }, + unique_id="23-45-A4O-MOF", + ) + + +@pytest.fixture +def mock_setup_entry() -> Generator[None]: + """Mock setting up a config entry.""" + with patch("homeassistant.components.peblar.async_setup_entry", return_value=True): 
+ yield + + +@pytest.fixture +def mock_peblar() -> Generator[MagicMock]: + """Return a mocked Peblar client.""" + with patch( + "homeassistant.components.peblar.config_flow.Peblar", autospec=True + ) as peblar_mock: + peblar = peblar_mock.return_value + peblar.system_information.return_value = PeblarSystemInformation.from_json( + load_fixture("system_information.json", DOMAIN) + ) + yield peblar diff --git a/tests/components/peblar/fixtures/system_information.json b/tests/components/peblar/fixtures/system_information.json new file mode 100644 index 00000000000..dcec52a37fe --- /dev/null +++ b/tests/components/peblar/fixtures/system_information.json @@ -0,0 +1,57 @@ +{ + "BopCalIGainA": 264625, + "BopCalIGainB": 267139, + "BopCalIGainC": 239155, + "CanChangeChargingPhases": false, + "CanChargeSinglePhase": true, + "CanChargeThreePhases": false, + "CustomerId": "PBLR-0000645", + "CustomerUpdatePackagePubKey": "-----BEGIN PUBLIC KEY-----\nlorem ipsum\n-----END PUBLIC KEY-----\n", + "EthMacAddr": "00:0F:11:58:86:97", + "FwIdent": "1.6.1+1+WL-1", + "Hostname": "PBLR-0000645", + "HwFixedCableRating": 20, + "HwFwCompat": "wlac-2", + "HwHas4pRelay": false, + "HwHasBop": true, + "HwHasBuzzer": true, + "HwHasDualSocket": false, + "HwHasEichrechtLaserMarking": false, + "HwHasEthernet": true, + "HwHasLed": true, + "HwHasLte": false, + "HwHasMeter": true, + "HwHasMeterDisplay": true, + "HwHasPlc": false, + "HwHasRfid": true, + "HwHasRs485": true, + "HwHasShutter": false, + "HwHasSocket": false, + "HwHasTpm": false, + "HwHasWlan": true, + "HwMaxCurrent": 16, + "HwOneOrThreePhase": 3, + "HwUKCompliant": false, + "MainboardPn": "6004-2300-7600", + "MainboardSn": "23-38-A4E-2MC", + "MeterCalIGainA": 267369, + "MeterCalIGainB": 228286, + "MeterCalIGainC": 246455, + "MeterCalIRmsOffsetA": 15573, + "MeterCalIRmsOffsetB": 268422963, + "MeterCalIRmsOffsetC": 9082, + "MeterCalPhaseA": 250, + "MeterCalPhaseB": 271, + "MeterCalPhaseC": 271, + "MeterCalVGainA": 250551, + "MeterCalVGainB": 
246074, + "MeterCalVGainC": 230191, + "MeterFwIdent": "b9cbcd", + "NorFlash": true, + "ProductModelName": "WLAC1-H11R0WE0ICR00", + "ProductPn": "6004-2300-8002", + "ProductSn": "23-45-A4O-MOF", + "ProductVendorName": "Peblar", + "WlanApMacAddr": "00:0F:11:58:86:98", + "WlanStaMacAddr": "00:0F:11:58:86:99" +} diff --git a/tests/components/peblar/test_config_flow.py b/tests/components/peblar/test_config_flow.py new file mode 100644 index 00000000000..0b2fa89e068 --- /dev/null +++ b/tests/components/peblar/test_config_flow.py @@ -0,0 +1,115 @@ +"""Configuration flow tests for the Peblar integration.""" + +from unittest.mock import MagicMock + +from peblar import PeblarAuthenticationError, PeblarConnectionError +import pytest + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_HOST, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + +pytestmark = pytest.mark.usefixtures("mock_setup_entry") + + +@pytest.mark.usefixtures("mock_peblar") +async def test_user_flow(hass: HomeAssistant) -> None: + """Test the full happy path user flow from start to finish.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGPUPPIES", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "23-45-A4O-MOF" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGPUPPIES", + } + assert not config_entry.options + + +@pytest.mark.parametrize( + ("side_effect", "expected_error"), + [ + (PeblarConnectionError, 
{CONF_HOST: "cannot_connect"}), + (PeblarAuthenticationError, {CONF_PASSWORD: "invalid_auth"}), + (Exception, {"base": "unknown"}), + ], +) +async def test_user_flow_errors( + hass: HomeAssistant, + mock_peblar: MagicMock, + side_effect: Exception, + expected_error: dict[str, str], +) -> None: + """Test we show user form on a connection error.""" + mock_peblar.login.side_effect = side_effect + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGCATS!", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == expected_error + + mock_peblar.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "127.0.0.2", + CONF_PASSWORD: "OMGPUPPIES!", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "23-45-A4O-MOF" + assert config_entry.data == { + CONF_HOST: "127.0.0.2", + CONF_PASSWORD: "OMGPUPPIES!", + } + assert not config_entry.options + + +@pytest.mark.usefixtures("mock_peblar") +async def test_user_flow_already_configured( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test configuration flow aborts when the device is already configured.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGSPIDERS", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" From 53ef96c63ea03b16aacad6dd1b178e854c4ba868 Mon Sep 17 00:00:00 2001 From: TJ Horner Date: Wed, 18 Dec 2024 10:21:03 -0800 Subject: [PATCH 435/677] weatherkit: use stale data for up to an hour if updates fail (#130398) --- .../components/weatherkit/coordinator.py | 19 
++++- tests/components/weatherkit/__init__.py | 36 +++++---- .../components/weatherkit/test_coordinator.py | 81 ++++++++++++++++--- tests/components/weatherkit/test_sensor.py | 5 +- tests/components/weatherkit/test_weather.py | 20 +++-- 5 files changed, 126 insertions(+), 35 deletions(-) diff --git a/homeassistant/components/weatherkit/coordinator.py b/homeassistant/components/weatherkit/coordinator.py index ddabba2fc1f..6438d7503db 100644 --- a/homeassistant/components/weatherkit/coordinator.py +++ b/homeassistant/components/weatherkit/coordinator.py @@ -2,7 +2,7 @@ from __future__ import annotations -from datetime import timedelta +from datetime import datetime, timedelta from apple_weatherkit import DataSetType from apple_weatherkit.client import WeatherKitApiClient, WeatherKitApiClientError @@ -20,12 +20,15 @@ REQUESTED_DATA_SETS = [ DataSetType.HOURLY_FORECAST, ] +STALE_DATA_THRESHOLD = timedelta(hours=1) + class WeatherKitDataUpdateCoordinator(DataUpdateCoordinator): """Class to manage fetching data from the API.""" config_entry: ConfigEntry supported_data_sets: list[DataSetType] | None = None + last_updated_at: datetime | None = None def __init__( self, @@ -62,10 +65,20 @@ class WeatherKitDataUpdateCoordinator(DataUpdateCoordinator): if not self.supported_data_sets: await self.update_supported_data_sets() - return await self.client.get_weather_data( + updated_data = await self.client.get_weather_data( self.config_entry.data[CONF_LATITUDE], self.config_entry.data[CONF_LONGITUDE], self.supported_data_sets, ) except WeatherKitApiClientError as exception: - raise UpdateFailed(exception) from exception + if self.data is None or ( + self.last_updated_at is not None + and datetime.now() - self.last_updated_at > STALE_DATA_THRESHOLD + ): + raise UpdateFailed(exception) from exception + + LOGGER.debug("Using stale data because update failed: %s", exception) + return self.data + else: + self.last_updated_at = datetime.now() + return updated_data diff --git 
a/tests/components/weatherkit/__init__.py b/tests/components/weatherkit/__init__.py index 99c856a7e37..bc7c31fe8d8 100644 --- a/tests/components/weatherkit/__init__.py +++ b/tests/components/weatherkit/__init__.py @@ -1,5 +1,6 @@ """Tests for the Apple WeatherKit integration.""" +from contextlib import contextmanager from unittest.mock import patch from apple_weatherkit import DataSetType @@ -26,20 +27,13 @@ EXAMPLE_CONFIG_DATA = { } -async def init_integration( - hass: HomeAssistant, +@contextmanager +def mock_weather_response( is_night_time: bool = False, has_hourly_forecast: bool = True, has_daily_forecast: bool = True, -) -> MockConfigEntry: - """Set up the WeatherKit integration in Home Assistant.""" - entry = MockConfigEntry( - domain=DOMAIN, - title="Home", - unique_id="0123456", - data=EXAMPLE_CONFIG_DATA, - ) - +): + """Mock a successful WeatherKit API response.""" weather_response = load_json_object_fixture("weatherkit/weather_response.json") available_data_sets = [DataSetType.CURRENT_WEATHER] @@ -68,8 +62,22 @@ async def init_integration( return_value=available_data_sets, ), ): - entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() + yield + + +async def init_integration( + hass: HomeAssistant, +) -> MockConfigEntry: + """Set up the WeatherKit integration in Home Assistant.""" + entry = MockConfigEntry( + domain=DOMAIN, + title="Home", + unique_id="0123456", + data=EXAMPLE_CONFIG_DATA, + ) + + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() return entry diff --git a/tests/components/weatherkit/test_coordinator.py b/tests/components/weatherkit/test_coordinator.py index eff142f3d94..7cc78179f44 100644 --- a/tests/components/weatherkit/test_coordinator.py +++ b/tests/components/weatherkit/test_coordinator.py @@ -4,30 +4,93 @@ from datetime import timedelta from unittest.mock import patch from apple_weatherkit.client import 
WeatherKitApiClientError +from freezegun.api import FrozenDateTimeFactory from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant -from homeassistant.util.dt import utcnow -from . import init_integration +from . import init_integration, mock_weather_response from tests.common import async_fire_time_changed -async def test_failed_updates(hass: HomeAssistant) -> None: - """Test that we properly handle failed updates.""" - await init_integration(hass) +async def test_update_uses_stale_data_before_threshold( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that stale data from the last successful update is used if an update failure occurs before the threshold.""" + with mock_weather_response(): + await init_integration(hass) + + state = hass.states.get("weather.home") + assert state + assert state.state != STATE_UNAVAILABLE + + initial_state = state.state + + # Expect stale data to be used before one hour with patch( "homeassistant.components.weatherkit.WeatherKitApiClient.get_weather_data", side_effect=WeatherKitApiClientError, ): - async_fire_time_changed( - hass, - utcnow() + timedelta(minutes=5), - ) + freezer.tick(timedelta(minutes=59)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("weather.home") + assert state + assert state.state == initial_state + + +async def test_update_becomes_unavailable_after_threshold( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that the entity becomes unavailable if an update failure occurs after the threshold.""" + with mock_weather_response(): + await init_integration(hass) + + # Expect state to be unavailable after one hour + + with patch( + "homeassistant.components.weatherkit.WeatherKitApiClient.get_weather_data", + side_effect=WeatherKitApiClientError, + ): + freezer.tick(timedelta(hours=1, minutes=5)) + async_fire_time_changed(hass) await hass.async_block_till_done() state = 
hass.states.get("weather.home") assert state assert state.state == STATE_UNAVAILABLE + + +async def test_update_recovers_after_failure( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that a successful update after repeated failures recovers the entity's state.""" + with mock_weather_response(): + await init_integration(hass) + + # Trigger a failure after threshold + + with patch( + "homeassistant.components.weatherkit.WeatherKitApiClient.get_weather_data", + side_effect=WeatherKitApiClientError, + ): + freezer.tick(timedelta(hours=1, minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Expect that a successful update recovers the entity + + with mock_weather_response(): + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("weather.home") + assert state + assert state.state != STATE_UNAVAILABLE diff --git a/tests/components/weatherkit/test_sensor.py b/tests/components/weatherkit/test_sensor.py index 6c6999c6bfd..6ded9a779d5 100644 --- a/tests/components/weatherkit/test_sensor.py +++ b/tests/components/weatherkit/test_sensor.py @@ -6,7 +6,7 @@ import pytest from homeassistant.core import HomeAssistant -from . import init_integration +from . 
import init_integration, mock_weather_response @pytest.mark.parametrize( @@ -20,7 +20,8 @@ async def test_sensor_values( hass: HomeAssistant, entity_name: str, expected_value: Any ) -> None: """Test that various sensor values match what we expect.""" - await init_integration(hass) + with mock_weather_response(): + await init_integration(hass) state = hass.states.get(entity_name) assert state diff --git a/tests/components/weatherkit/test_weather.py b/tests/components/weatherkit/test_weather.py index ba20276c22e..ec4ce2ba3b3 100644 --- a/tests/components/weatherkit/test_weather.py +++ b/tests/components/weatherkit/test_weather.py @@ -23,12 +23,13 @@ from homeassistant.components.weatherkit.const import ATTRIBUTION from homeassistant.const import ATTR_ATTRIBUTION, ATTR_SUPPORTED_FEATURES from homeassistant.core import HomeAssistant -from . import init_integration +from . import init_integration, mock_weather_response async def test_current_weather(hass: HomeAssistant) -> None: """Test states of the current weather.""" - await init_integration(hass) + with mock_weather_response(): + await init_integration(hass) state = hass.states.get("weather.home") assert state @@ -49,7 +50,8 @@ async def test_current_weather(hass: HomeAssistant) -> None: async def test_current_weather_nighttime(hass: HomeAssistant) -> None: """Test that the condition is clear-night when it's sunny and night time.""" - await init_integration(hass, is_night_time=True) + with mock_weather_response(is_night_time=True): + await init_integration(hass) state = hass.states.get("weather.home") assert state @@ -58,7 +60,8 @@ async def test_current_weather_nighttime(hass: HomeAssistant) -> None: async def test_daily_forecast_missing(hass: HomeAssistant) -> None: """Test that daily forecast is not supported when WeatherKit doesn't support it.""" - await init_integration(hass, has_daily_forecast=False) + with mock_weather_response(has_daily_forecast=False): + await init_integration(hass) state = 
hass.states.get("weather.home") assert state @@ -69,7 +72,8 @@ async def test_daily_forecast_missing(hass: HomeAssistant) -> None: async def test_hourly_forecast_missing(hass: HomeAssistant) -> None: """Test that hourly forecast is not supported when WeatherKit doesn't support it.""" - await init_integration(hass, has_hourly_forecast=False) + with mock_weather_response(has_hourly_forecast=False): + await init_integration(hass) state = hass.states.get("weather.home") assert state @@ -86,7 +90,8 @@ async def test_hourly_forecast( hass: HomeAssistant, snapshot: SnapshotAssertion, service: str ) -> None: """Test states of the hourly forecast.""" - await init_integration(hass) + with mock_weather_response(): + await init_integration(hass) response = await hass.services.async_call( WEATHER_DOMAIN, @@ -109,7 +114,8 @@ async def test_daily_forecast( hass: HomeAssistant, snapshot: SnapshotAssertion, service: str ) -> None: """Test states of the daily forecast.""" - await init_integration(hass) + with mock_weather_response(): + await init_integration(hass) response = await hass.services.async_call( WEATHER_DOMAIN, From 70ad4ee454f04da2cd75778f8c83f202da910469 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Wed, 18 Dec 2024 19:32:51 +0100 Subject: [PATCH 436/677] Add select platform to IronOS (#132218) --- homeassistant/components/iron_os/__init__.py | 1 + homeassistant/components/iron_os/icons.json | 32 +- homeassistant/components/iron_os/select.py | 208 ++++++++ homeassistant/components/iron_os/strings.json | 76 +++ tests/components/iron_os/conftest.py | 17 +- .../iron_os/snapshots/test_select.ambr | 469 ++++++++++++++++++ tests/components/iron_os/test_select.py | 164 ++++++ 7 files changed, 963 insertions(+), 4 deletions(-) create mode 100644 homeassistant/components/iron_os/select.py create mode 100644 tests/components/iron_os/snapshots/test_select.ambr create mode 100644 tests/components/iron_os/test_select.py diff --git 
a/homeassistant/components/iron_os/__init__.py b/homeassistant/components/iron_os/__init__.py index 0fe5acc2db6..9655f7bfcdd 100644 --- a/homeassistant/components/iron_os/__init__.py +++ b/homeassistant/components/iron_os/__init__.py @@ -28,6 +28,7 @@ from .coordinator import ( PLATFORMS: list[Platform] = [ Platform.BINARY_SENSOR, Platform.NUMBER, + Platform.SELECT, Platform.SENSOR, Platform.UPDATE, ] diff --git a/homeassistant/components/iron_os/icons.json b/homeassistant/components/iron_os/icons.json index eadcc17bb37..0d26b027c3f 100644 --- a/homeassistant/components/iron_os/icons.json +++ b/homeassistant/components/iron_os/icons.json @@ -63,13 +63,39 @@ "min_voltage_per_cell": { "default": "mdi:fuel-cell" }, - "min_dc_voltage_cells": { - "default": "mdi:battery-arrow-down" - }, "power_limit": { "default": "mdi:flash-alert" } }, + "select": { + "locking_mode": { + "default": "mdi:download-lock" + }, + "orientation_mode": { + "default": "mdi:screen-rotation" + }, + "autostart_mode": { + "default": "mdi:power-standby" + }, + "animation_speed": { + "default": "mdi:image-refresh" + }, + "min_dc_voltage_cells": { + "default": "mdi:fuel-cell" + }, + "temp_unit": { + "default": "mdi:temperature-celsius", + "state": { + "fahrenheit": "mdi:temperature-fahrenheit" + } + }, + "desc_scroll_speed": { + "default": "mdi:message-text-fast" + }, + "logo_duration": { + "default": "mdi:clock-digital" + } + }, "sensor": { "live_temperature": { "default": "mdi:soldering-iron" diff --git a/homeassistant/components/iron_os/select.py b/homeassistant/components/iron_os/select.py new file mode 100644 index 00000000000..c863e076f0b --- /dev/null +++ b/homeassistant/components/iron_os/select.py @@ -0,0 +1,208 @@ +"""Select platform for IronOS integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from enum import Enum, StrEnum +from typing import Any + +from pynecil import ( + AnimationSpeed, + AutostartMode, + 
BatteryType, + CharSetting, + CommunicationError, + LockingMode, + LogoDuration, + ScreenOrientationMode, + ScrollSpeed, + SettingsDataResponse, + TempUnit, +) + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import IronOSConfigEntry +from .const import DOMAIN +from .coordinator import IronOSCoordinators +from .entity import IronOSBaseEntity + +PARALLEL_UPDATES = 0 + + +@dataclass(frozen=True, kw_only=True) +class IronOSSelectEntityDescription(SelectEntityDescription): + """Describes IronOS select entity.""" + + value_fn: Callable[[SettingsDataResponse], str | None] + characteristic: CharSetting + raw_value_fn: Callable[[str], Any] | None = None + + +class PinecilSelect(StrEnum): + """Select controls for Pinecil device.""" + + MIN_DC_VOLTAGE_CELLS = "min_dc_voltage_cells" + ORIENTATION_MODE = "orientation_mode" + ANIMATION_SPEED = "animation_speed" + AUTOSTART_MODE = "autostart_mode" + TEMP_UNIT = "temp_unit" + DESC_SCROLL_SPEED = "desc_scroll_speed" + LOCKING_MODE = "locking_mode" + LOGO_DURATION = "logo_duration" + + +def enum_to_str(enum: Enum | None) -> str | None: + """Convert enum name to lower-case string.""" + return enum.name.lower() if isinstance(enum, Enum) else None + + +PINECIL_SELECT_DESCRIPTIONS: tuple[IronOSSelectEntityDescription, ...] 
= ( + IronOSSelectEntityDescription( + key=PinecilSelect.MIN_DC_VOLTAGE_CELLS, + translation_key=PinecilSelect.MIN_DC_VOLTAGE_CELLS, + characteristic=CharSetting.MIN_DC_VOLTAGE_CELLS, + value_fn=lambda x: enum_to_str(x.get("min_dc_voltage_cells")), + raw_value_fn=lambda value: BatteryType[value.upper()], + options=[x.name.lower() for x in BatteryType], + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + ), + IronOSSelectEntityDescription( + key=PinecilSelect.ORIENTATION_MODE, + translation_key=PinecilSelect.ORIENTATION_MODE, + characteristic=CharSetting.ORIENTATION_MODE, + value_fn=lambda x: enum_to_str(x.get("orientation_mode")), + raw_value_fn=lambda value: ScreenOrientationMode[value.upper()], + options=[x.name.lower() for x in ScreenOrientationMode], + entity_category=EntityCategory.CONFIG, + ), + IronOSSelectEntityDescription( + key=PinecilSelect.ANIMATION_SPEED, + translation_key=PinecilSelect.ANIMATION_SPEED, + characteristic=CharSetting.ANIMATION_SPEED, + value_fn=lambda x: enum_to_str(x.get("animation_speed")), + raw_value_fn=lambda value: AnimationSpeed[value.upper()], + options=[x.name.lower() for x in AnimationSpeed], + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + ), + IronOSSelectEntityDescription( + key=PinecilSelect.AUTOSTART_MODE, + translation_key=PinecilSelect.AUTOSTART_MODE, + characteristic=CharSetting.AUTOSTART_MODE, + value_fn=lambda x: enum_to_str(x.get("autostart_mode")), + raw_value_fn=lambda value: AutostartMode[value.upper()], + options=[x.name.lower() for x in AutostartMode], + entity_category=EntityCategory.CONFIG, + ), + IronOSSelectEntityDescription( + key=PinecilSelect.TEMP_UNIT, + translation_key=PinecilSelect.TEMP_UNIT, + characteristic=CharSetting.TEMP_UNIT, + value_fn=lambda x: enum_to_str(x.get("temp_unit")), + raw_value_fn=lambda value: TempUnit[value.upper()], + options=[x.name.lower() for x in TempUnit], + entity_category=EntityCategory.CONFIG, + 
entity_registry_enabled_default=False, + ), + IronOSSelectEntityDescription( + key=PinecilSelect.DESC_SCROLL_SPEED, + translation_key=PinecilSelect.DESC_SCROLL_SPEED, + characteristic=CharSetting.DESC_SCROLL_SPEED, + value_fn=lambda x: enum_to_str(x.get("desc_scroll_speed")), + raw_value_fn=lambda value: ScrollSpeed[value.upper()], + options=[x.name.lower() for x in ScrollSpeed], + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + ), + IronOSSelectEntityDescription( + key=PinecilSelect.LOCKING_MODE, + translation_key=PinecilSelect.LOCKING_MODE, + characteristic=CharSetting.LOCKING_MODE, + value_fn=lambda x: enum_to_str(x.get("locking_mode")), + raw_value_fn=lambda value: LockingMode[value.upper()], + options=[x.name.lower() for x in LockingMode], + entity_category=EntityCategory.CONFIG, + ), + IronOSSelectEntityDescription( + key=PinecilSelect.LOGO_DURATION, + translation_key=PinecilSelect.LOGO_DURATION, + characteristic=CharSetting.LOGO_DURATION, + value_fn=lambda x: enum_to_str(x.get("logo_duration")), + raw_value_fn=lambda value: LogoDuration[value.upper()], + options=[x.name.lower() for x in LogoDuration], + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: IronOSConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up select entities from a config entry.""" + coordinator = entry.runtime_data + + async_add_entities( + IronOSSelectEntity(coordinator, description) + for description in PINECIL_SELECT_DESCRIPTIONS + ) + + +class IronOSSelectEntity(IronOSBaseEntity, SelectEntity): + """Implementation of a IronOS select entity.""" + + entity_description: IronOSSelectEntityDescription + + def __init__( + self, + coordinator: IronOSCoordinators, + entity_description: IronOSSelectEntityDescription, + ) -> None: + """Initialize the select entity.""" + super().__init__( + coordinator.live_data, entity_description, 
entity_description.characteristic + ) + + self.settings = coordinator.settings + + @property + def current_option(self) -> str | None: + """Return the selected entity option to represent the entity state.""" + + return self.entity_description.value_fn(self.settings.data) + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + + if raw_value_fn := self.entity_description.raw_value_fn: + value = raw_value_fn(option) + try: + await self.coordinator.device.write( + self.entity_description.characteristic, value + ) + except CommunicationError as e: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="submit_setting_failed", + ) from e + await self.settings.async_request_refresh() + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + + await super().async_added_to_hass() + self.async_on_remove( + self.settings.async_add_listener( + self._handle_coordinator_update, self.entity_description.characteristic + ) + ) + await self.settings.async_request_refresh() diff --git a/homeassistant/components/iron_os/strings.json b/homeassistant/components/iron_os/strings.json index 13528104f8c..04c55280550 100644 --- a/homeassistant/components/iron_os/strings.json +++ b/homeassistant/components/iron_os/strings.json @@ -1,4 +1,8 @@ { + "common": { + "slow": "Slow", + "fast": "Fast" + }, "config": { "step": { "user": { @@ -84,6 +88,78 @@ "name": "Long-press temperature step" } }, + "select": { + "min_dc_voltage_cells": { + "name": "Power source", + "state": { + "no_battery": "External power supply (DC)", + "battery_3s": "3S (3 cells)", + "battery_4s": "4S (4 cells)", + "battery_5s": "5S (5 cells)", + "battery_6s": "6S (6 cells)" + } + }, + "orientation_mode": { + "name": "Display orientation mode", + "state": { + "right_handed": "Right-handed", + "left_handed": "Left-handed", + "auto": "Auto" + } + }, + "animation_speed": { + "name": "Animation speed", + "state": { + 
"off": "[%key:common::state::off%]", + "slow": "[%key:component::iron_os::common::slow%]", + "medium": "Medium", + "fast": "[%key:component::iron_os::common::fast%]" + } + }, + "autostart_mode": { + "name": "Start-up behavior", + "state": { + "disabled": "[%key:common::state::disabled%]", + "soldering": "Soldering mode", + "sleeping": "Sleeping mode", + "idle": "Idle mode" + } + }, + "temp_unit": { + "name": "Temperature display unit", + "state": { + "celsius": "Celsius (C°)", + "fahrenheit": "Fahrenheit (F°)" + } + }, + "desc_scroll_speed": { + "name": "Scrolling speed", + "state": { + "slow": "[%key:component::iron_os::common::slow%]", + "fast": "[%key:component::iron_os::common::fast%]" + } + }, + "locking_mode": { + "name": "Button locking mode", + "state": { + "off": "[%key:common::state::off%]", + "boost_only": "Boost only", + "full_locking": "Full locking" + } + }, + "logo_duration": { + "name": "Boot logo duration", + "state": { + "off": "[%key:common::state::off%]", + "seconds_1": "1 second", + "seconds_2": "2 second", + "seconds_3": "3 second", + "seconds_4": "4 second", + "seconds_5": "5 second", + "loop": "Loop" + } + } + }, "sensor": { "live_temperature": { "name": "Tip temperature" diff --git a/tests/components/iron_os/conftest.py b/tests/components/iron_os/conftest.py index 9091694e6a5..356c7358c55 100644 --- a/tests/components/iron_os/conftest.py +++ b/tests/components/iron_os/conftest.py @@ -6,12 +6,20 @@ from unittest.mock import AsyncMock, MagicMock, patch from bleak.backends.device import BLEDevice from habluetooth import BluetoothServiceInfoBleak from pynecil import ( + AnimationSpeed, + AutostartMode, + BatteryType, DeviceInfoResponse, LatestRelease, LiveDataResponse, + LockingMode, + LogoDuration, OperatingMode, PowerSource, + ScreenOrientationMode, + ScrollSpeed, SettingsDataResponse, + TempUnit, ) import pytest @@ -151,7 +159,7 @@ def mock_pynecil() -> Generator[AsyncMock]: client.get_settings.return_value = SettingsDataResponse( 
sleep_temp=150, sleep_timeout=5, - min_dc_voltage_cells=0, + min_dc_voltage_cells=BatteryType.BATTERY_3S, min_volltage_per_cell=3.3, qc_ideal_voltage=9.0, accel_sensitivity=7, @@ -168,6 +176,13 @@ def mock_pynecil() -> Generator[AsyncMock]: hall_sensitivity=7, pd_negotiation_timeout=2.0, display_brightness=3, + orientation_mode=ScreenOrientationMode.RIGHT_HANDED, + animation_speed=AnimationSpeed.MEDIUM, + autostart_mode=AutostartMode.IDLE, + temp_unit=TempUnit.CELSIUS, + desc_scroll_speed=ScrollSpeed.FAST, + logo_duration=LogoDuration.LOOP, + locking_mode=LockingMode.FULL_LOCKING, ) client.get_live_data.return_value = LiveDataResponse( live_temp=298, diff --git a/tests/components/iron_os/snapshots/test_select.ambr b/tests/components/iron_os/snapshots/test_select.ambr new file mode 100644 index 00000000000..ce6045c1243 --- /dev/null +++ b/tests/components/iron_os/snapshots/test_select.ambr @@ -0,0 +1,469 @@ +# serializer version: 1 +# name: test_state[select.pinecil_animation_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'slow', + 'medium', + 'fast', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.pinecil_animation_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Animation speed', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_animation_speed', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[select.pinecil_animation_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Animation speed', + 'options': list([ + 'off', + 'slow', + 'medium', + 
'fast', + ]), + }), + 'context': , + 'entity_id': 'select.pinecil_animation_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'medium', + }) +# --- +# name: test_state[select.pinecil_boot_logo_duration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'seconds_1', + 'seconds_2', + 'seconds_3', + 'seconds_4', + 'seconds_5', + 'loop', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.pinecil_boot_logo_duration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Boot logo duration', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_logo_duration', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[select.pinecil_boot_logo_duration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Boot logo duration', + 'options': list([ + 'off', + 'seconds_1', + 'seconds_2', + 'seconds_3', + 'seconds_4', + 'seconds_5', + 'loop', + ]), + }), + 'context': , + 'entity_id': 'select.pinecil_boot_logo_duration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'loop', + }) +# --- +# name: test_state[select.pinecil_button_locking_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'boost_only', + 'full_locking', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.pinecil_button_locking_mode', + 'has_entity_name': True, + 'hidden_by': 
None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Button locking mode', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_locking_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[select.pinecil_button_locking_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Button locking mode', + 'options': list([ + 'off', + 'boost_only', + 'full_locking', + ]), + }), + 'context': , + 'entity_id': 'select.pinecil_button_locking_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'full_locking', + }) +# --- +# name: test_state[select.pinecil_display_orientation_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'right_handed', + 'left_handed', + 'auto', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.pinecil_display_orientation_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Display orientation mode', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_orientation_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[select.pinecil_display_orientation_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Display orientation mode', + 'options': list([ + 'right_handed', + 'left_handed', + 'auto', + ]), + }), + 'context': , + 'entity_id': 
'select.pinecil_display_orientation_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'right_handed', + }) +# --- +# name: test_state[select.pinecil_power_source-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_battery', + 'battery_3s', + 'battery_4s', + 'battery_5s', + 'battery_6s', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.pinecil_power_source', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power source', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_min_dc_voltage_cells', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[select.pinecil_power_source-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Power source', + 'options': list([ + 'no_battery', + 'battery_3s', + 'battery_4s', + 'battery_5s', + 'battery_6s', + ]), + }), + 'context': , + 'entity_id': 'select.pinecil_power_source', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'battery_3s', + }) +# --- +# name: test_state[select.pinecil_scrolling_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'slow', + 'fast', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.pinecil_scrolling_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Scrolling speed', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_desc_scroll_speed', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[select.pinecil_scrolling_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Scrolling speed', + 'options': list([ + 'slow', + 'fast', + ]), + }), + 'context': , + 'entity_id': 'select.pinecil_scrolling_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'fast', + }) +# --- +# name: test_state[select.pinecil_start_up_behavior-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'disabled', + 'soldering', + 'sleeping', + 'idle', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.pinecil_start_up_behavior', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Start-up behavior', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_autostart_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[select.pinecil_start_up_behavior-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Start-up behavior', + 'options': list([ + 'disabled', + 'soldering', + 'sleeping', + 'idle', + ]), + }), + 'context': , + 'entity_id': 'select.pinecil_start_up_behavior', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_state[select.pinecil_temperature_display_unit-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'celsius', + 'fahrenheit', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.pinecil_temperature_display_unit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Temperature display unit', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_temp_unit', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[select.pinecil_temperature_display_unit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Temperature display unit', + 'options': list([ + 'celsius', + 'fahrenheit', + ]), + }), + 'context': , + 'entity_id': 'select.pinecil_temperature_display_unit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'celsius', + }) +# --- diff --git a/tests/components/iron_os/test_select.py b/tests/components/iron_os/test_select.py new file mode 100644 index 00000000000..5e981e1618e --- /dev/null +++ b/tests/components/iron_os/test_select.py @@ -0,0 +1,164 @@ +"""Tests for the IronOS select platform.""" + +from collections.abc import AsyncGenerator +from datetime import timedelta +from enum import Enum +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from pynecil import ( + AnimationSpeed, + BatteryType, + CharSetting, + CommunicationError, + LockingMode, + LogoDuration, + ScreenOrientationMode, + ScrollSpeed, + TempUnit, +) +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.select import ( + ATTR_OPTION, + DOMAIN as SELECT_DOMAIN, + 
SERVICE_SELECT_OPTION, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@pytest.fixture(autouse=True) +async def select_only() -> AsyncGenerator[None]: + """Enable only the select platform.""" + with patch( + "homeassistant.components.iron_os.PLATFORMS", + [Platform.SELECT], + ): + yield + + +@pytest.mark.usefixtures( + "entity_registry_enabled_by_default", "mock_pynecil", "ble_device" +) +async def test_state( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the IronOS select platform states.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("entity_id", "option", "call_params"), + [ + ( + "select.pinecil_power_source", + "battery_3s", + (CharSetting.MIN_DC_VOLTAGE_CELLS, BatteryType.BATTERY_3S), + ), + ( + "select.pinecil_display_orientation_mode", + "right_handed", + (CharSetting.ORIENTATION_MODE, ScreenOrientationMode.RIGHT_HANDED), + ), + ( + "select.pinecil_animation_speed", + "medium", + (CharSetting.ANIMATION_SPEED, AnimationSpeed.MEDIUM), + ), + ( + "select.pinecil_temperature_display_unit", + "fahrenheit", + (CharSetting.TEMP_UNIT, TempUnit.FAHRENHEIT), + ), + ( + "select.pinecil_scrolling_speed", + "fast", + (CharSetting.DESC_SCROLL_SPEED, 
ScrollSpeed.FAST), + ), + ( + "select.pinecil_button_locking_mode", + "full_locking", + (CharSetting.LOCKING_MODE, LockingMode.FULL_LOCKING), + ), + ( + "select.pinecil_boot_logo_duration", + "loop", + (CharSetting.LOGO_DURATION, LogoDuration.LOOP), + ), + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") +async def test_select_option( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, + entity_id: str, + option: str, + call_params: tuple[Enum, ...], +) -> None: + """Test the IronOS select option service.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + service_data={ATTR_OPTION: option}, + target={ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + assert len(mock_pynecil.write.mock_calls) == 1 + mock_pynecil.write.assert_called_once_with(*call_params) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") +async def test_select_option_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, +) -> None: + """Test the IronOS select option service exception.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_pynecil.write.side_effect = CommunicationError + + with pytest.raises( + ServiceValidationError, + match="Failed to submit setting to device, try again later", + ): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + service_data={ATTR_OPTION: "battery_3s"}, + target={ATTR_ENTITY_ID: "select.pinecil_power_source"}, + blocking=True, + ) From 352e948d56b2ba048d536c63d542e9c35646b068 Mon Sep 17 00:00:00 2001 From: Manu 
<4445816+tr4nt0r@users.noreply.github.com> Date: Wed, 18 Dec 2024 19:33:33 +0100 Subject: [PATCH 437/677] Add tests for already_configured erros in IronOS integration (#132265) --- .../components/iron_os/quality_scale.yaml | 2 +- tests/components/iron_os/test_config_flow.py | 54 ++++++++++++++++--- 2 files changed, 49 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/iron_os/quality_scale.yaml b/homeassistant/components/iron_os/quality_scale.yaml index 5ede3d6971d..922702b8260 100644 --- a/homeassistant/components/iron_os/quality_scale.yaml +++ b/homeassistant/components/iron_os/quality_scale.yaml @@ -6,7 +6,7 @@ rules: appropriate-polling: done brands: done common-modules: done - config-flow-test-coverage: todo + config-flow-test-coverage: done config-flow: done dependency-transparency: done docs-actions: diff --git a/tests/components/iron_os/test_config_flow.py b/tests/components/iron_os/test_config_flow.py index 231ec6cc3d6..e1ac8fb9f00 100644 --- a/tests/components/iron_os/test_config_flow.py +++ b/tests/components/iron_os/test_config_flow.py @@ -4,6 +4,8 @@ from __future__ import annotations from unittest.mock import AsyncMock, MagicMock +import pytest + from homeassistant.components.iron_os import DOMAIN from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER from homeassistant.core import HomeAssistant @@ -11,9 +13,12 @@ from homeassistant.data_entry_flow import FlowResultType from .conftest import DEFAULT_NAME, PINECIL_SERVICE_INFO, USER_INPUT +from tests.common import MockConfigEntry -async def test_form( - hass: HomeAssistant, mock_setup_entry: AsyncMock, discovery: MagicMock + +@pytest.mark.usefixtures("discovery") +async def test_async_step_user( + hass: HomeAssistant, mock_setup_entry: AsyncMock ) -> None: """Test the user config flow.""" result = await hass.config_entries.flow.async_init( @@ -32,10 +37,31 @@ async def test_form( assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.usefixtures("discovery") +async 
def test_async_step_user_device_added_between_steps( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: + """Test the device gets added via another flow between steps.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + + config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.usefixtures("mock_setup_entry") async def test_form_no_device_discovered( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - discovery: MagicMock, + hass: HomeAssistant, discovery: MagicMock ) -> None: """Test setup with no device discoveries.""" discovery.return_value = [] @@ -48,7 +74,7 @@ async def test_form_no_device_discovered( async def test_async_step_bluetooth(hass: HomeAssistant) -> None: - """Test discovery via bluetooth..""" + """Test discovery via bluetooth.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_BLUETOOTH}, @@ -64,3 +90,19 @@ async def test_async_step_bluetooth(hass: HomeAssistant) -> None: assert result["title"] == DEFAULT_NAME assert result["data"] == {} assert result["result"].unique_id == "c0:ff:ee:c0:ff:ee" + + +async def test_async_step_bluetooth_devices_already_setup( + hass: HomeAssistant, config_entry: AsyncMock +) -> None: + """Test we can't start a flow if there is already a config entry.""" + + config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_BLUETOOTH}, + data=PINECIL_SERVICE_INFO, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" From 51bead32293308882d066b64c41bdf7ae22f7846 Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Wed, 18 Dec 
2024 19:34:49 +0100 Subject: [PATCH 438/677] Update number platform values before add in APSystems and add tests (#131938) Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/components/apsystems/number.py | 2 +- tests/components/apsystems/conftest.py | 1 + .../apsystems/snapshots/test_number.ambr | 58 ++++++++++++++++++ tests/components/apsystems/test_number.py | 61 +++++++++++++++++++ 4 files changed, 121 insertions(+), 1 deletion(-) create mode 100644 tests/components/apsystems/snapshots/test_number.ambr create mode 100644 tests/components/apsystems/test_number.py diff --git a/homeassistant/components/apsystems/number.py b/homeassistant/components/apsystems/number.py index 01e991f5188..6463d10f3e8 100644 --- a/homeassistant/components/apsystems/number.py +++ b/homeassistant/components/apsystems/number.py @@ -20,7 +20,7 @@ async def async_setup_entry( ) -> None: """Set up the sensor platform.""" - add_entities([ApSystemsMaxOutputNumber(config_entry.runtime_data)]) + add_entities([ApSystemsMaxOutputNumber(config_entry.runtime_data)], True) class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity): diff --git a/tests/components/apsystems/conftest.py b/tests/components/apsystems/conftest.py index 0feccf21578..7a48ff7db3f 100644 --- a/tests/components/apsystems/conftest.py +++ b/tests/components/apsystems/conftest.py @@ -59,6 +59,7 @@ def mock_apsystems() -> Generator[MagicMock]: operating=False, ) mock_api.get_device_power_status.return_value = True + mock_api.get_max_power.return_value = 666 yield mock_api diff --git a/tests/components/apsystems/snapshots/test_number.ambr b/tests/components/apsystems/snapshots/test_number.ambr new file mode 100644 index 00000000000..a2b82e23596 --- /dev/null +++ b/tests/components/apsystems/snapshots/test_number.ambr @@ -0,0 +1,58 @@ +# serializer version: 1 +# name: test_all_entities[number.mock_title_max_output-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'max': 1000, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.mock_title_max_output', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Max output', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'max_output', + 'unique_id': 'MY_SERIAL_NUMBER_output_limit', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.mock_title_max_output-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Mock Title Max output', + 'max': 1000, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.mock_title_max_output', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '666', + }) +# --- diff --git a/tests/components/apsystems/test_number.py b/tests/components/apsystems/test_number.py new file mode 100644 index 00000000000..5868bd3da34 --- /dev/null +++ b/tests/components/apsystems/test_number.py @@ -0,0 +1,61 @@ +"""Test the APSystem number module.""" + +import datetime +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + +SCAN_INTERVAL = datetime.timedelta(seconds=30) + + +async def test_number( + hass: HomeAssistant, + mock_apsystems: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test number command.""" + await setup_integration(hass, mock_config_entry) + entity_id = "number.mock_title_max_output" + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + service_data={ATTR_VALUE: 50.1}, + target={ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_apsystems.set_max_power.assert_called_once_with(50) + mock_apsystems.get_max_power.return_value = 50 + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == "50" + + +@pytest.mark.usefixtures("mock_apsystems") +@patch("homeassistant.components.apsystems.PLATFORMS", [Platform.NUMBER]) +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_integration(hass, mock_config_entry) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) From 4daf6dd41d11443f3c0f36b18401fffc1f979768 Mon Sep 17 00:00:00 2001 From: Joakim Plate Date: Wed, 18 Dec 2024 19:39:35 +0100 Subject: [PATCH 439/677] Bump gardena_bluetooth to 1.5.0 (#133502) --- homeassistant/components/gardena_bluetooth/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/gardena_bluetooth/manifest.json b/homeassistant/components/gardena_bluetooth/manifest.json index da5c08c38c5..28bba1015f5 100644 --- a/homeassistant/components/gardena_bluetooth/manifest.json +++ 
b/homeassistant/components/gardena_bluetooth/manifest.json @@ -14,5 +14,5 @@ "documentation": "https://www.home-assistant.io/integrations/gardena_bluetooth", "iot_class": "local_polling", "loggers": ["bleak", "bleak_esphome", "gardena_bluetooth"], - "requirements": ["gardena-bluetooth==1.4.4"] + "requirements": ["gardena-bluetooth==1.5.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 1b1938b2e4f..79f1411ea42 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -956,7 +956,7 @@ fyta_cli==0.7.0 gTTS==2.2.4 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.4.4 +gardena-bluetooth==1.5.0 # homeassistant.components.google_assistant_sdk gassist-text==0.0.11 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 93a7979600d..32e815babdd 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -809,7 +809,7 @@ fyta_cli==0.7.0 gTTS==2.2.4 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.4.4 +gardena-bluetooth==1.5.0 # homeassistant.components.google_assistant_sdk gassist-text==0.0.11 From 0ff2a0d66db24887a6d5213a5268b36443322547 Mon Sep 17 00:00:00 2001 From: mvn23 Date: Wed, 18 Dec 2024 19:46:30 +0100 Subject: [PATCH 440/677] Add "cancel room setpoint override" button to opentherm_gw (#132162) --- .../components/opentherm_gw/button.py | 13 ++++++- .../components/opentherm_gw/strings.json | 5 +++ tests/components/opentherm_gw/test_button.py | 34 +++++++++++++++++++ 3 files changed, 51 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/opentherm_gw/button.py b/homeassistant/components/opentherm_gw/button.py index bac50295199..00b91ad33e0 100644 --- a/homeassistant/components/opentherm_gw/button.py +++ b/homeassistant/components/opentherm_gw/button.py @@ -16,7 +16,12 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import OpenThermGatewayHub -from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW, GATEWAY_DEVICE_DESCRIPTION +from .const import ( + DATA_GATEWAYS, + DATA_OPENTHERM_GW, + GATEWAY_DEVICE_DESCRIPTION, + THERMOSTAT_DEVICE_DESCRIPTION, +) from .entity import OpenThermEntity, OpenThermEntityDescription @@ -30,6 +35,12 @@ class OpenThermButtonEntityDescription( BUTTON_DESCRIPTIONS: tuple[OpenThermButtonEntityDescription, ...] = ( + OpenThermButtonEntityDescription( + key="cancel_room_setpoint_override", + translation_key="cancel_room_setpoint_override", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + action=lambda hub: hub.set_room_setpoint(0), + ), OpenThermButtonEntityDescription( key="restart_button", device_class=ButtonDeviceClass.RESTART, diff --git a/homeassistant/components/opentherm_gw/strings.json b/homeassistant/components/opentherm_gw/strings.json index 834168eb113..4c452da41ae 100644 --- a/homeassistant/components/opentherm_gw/strings.json +++ b/homeassistant/components/opentherm_gw/strings.json @@ -158,6 +158,11 @@ "name": "Programmed change has priority over override" } }, + "button": { + "cancel_room_setpoint_override": { + "name": "Cancel room setpoint override" + } + }, "select": { "gpio_mode_n": { "name": "GPIO {gpio_id} mode", diff --git a/tests/components/opentherm_gw/test_button.py b/tests/components/opentherm_gw/test_button.py index b02a9d9fef0..d8de52559e7 100644 --- a/tests/components/opentherm_gw/test_button.py +++ b/tests/components/opentherm_gw/test_button.py @@ -16,6 +16,40 @@ from .conftest import MINIMAL_STATUS from tests.common import MockConfigEntry +async def test_cancel_room_setpoint_override_button( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + mock_pyotgw: MagicMock, +) -> None: + """Test cancel room setpoint override button.""" + + mock_pyotgw.return_value.set_target_temp = AsyncMock(return_value=0) + mock_config_entry.add_to_hass(hass) + + await 
hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + button_entity_id := entity_registry.async_get_entity_id( + BUTTON_DOMAIN, + OPENTHERM_DOMAIN, + f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.THERMOSTAT}-cancel_room_setpoint_override", + ) + ) is not None + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: button_entity_id, + }, + blocking=True, + ) + + mock_pyotgw.return_value.set_target_temp.assert_awaited_once_with(0, True) + + async def test_restart_button( hass: HomeAssistant, entity_registry: er.EntityRegistry, From 3a8b0b3ea6c3111b605c35c0dec1a6b8833df0af Mon Sep 17 00:00:00 2001 From: Shay Levy Date: Wed, 18 Dec 2024 20:46:52 +0200 Subject: [PATCH 441/677] Use Switcher _async_call_api in climate (#133230) --- .../components/switcher_kis/climate.py | 25 +++---------------- .../components/switcher_kis/entity.py | 7 +++--- 2 files changed, 7 insertions(+), 25 deletions(-) diff --git a/homeassistant/components/switcher_kis/climate.py b/homeassistant/components/switcher_kis/climate.py index 5285e7549ef..2fc4a331676 100644 --- a/homeassistant/components/switcher_kis/climate.py +++ b/homeassistant/components/switcher_kis/climate.py @@ -4,7 +4,6 @@ from __future__ import annotations from typing import Any, cast -from aioswitcher.api import SwitcherApi, SwitcherBaseResponse from aioswitcher.api.remotes import SwitcherBreezeRemote from aioswitcher.device import ( DeviceCategory, @@ -38,6 +37,8 @@ from .coordinator import SwitcherDataUpdateCoordinator from .entity import SwitcherEntity from .utils import get_breeze_remote_manager +API_CONTROL_BREEZE_DEVICE = "control_breeze_device" + DEVICE_MODE_TO_HA = { ThermostatMode.COOL: HVACMode.COOL, ThermostatMode.HEAT: HVACMode.HEAT, @@ -155,27 +156,7 @@ class SwitcherClimateEntity(SwitcherEntity, ClimateEntity): async def _async_control_breeze_device(self, **kwargs: Any) -> None: """Call Switcher Control 
Breeze API.""" - response: SwitcherBaseResponse | None = None - error = None - - try: - async with SwitcherApi( - self.coordinator.data.device_type, - self.coordinator.data.ip_address, - self.coordinator.data.device_id, - self.coordinator.data.device_key, - ) as swapi: - response = await swapi.control_breeze_device(self._remote, **kwargs) - except (TimeoutError, OSError, RuntimeError) as err: - error = repr(err) - - if error or not response or not response.successful: - self.coordinator.last_update_success = False - self.async_write_ha_state() - raise HomeAssistantError( - f"Call Breeze control for {self.name} failed, " - f"response/error: {response or error}" - ) + await self._async_call_api(API_CONTROL_BREEZE_DEVICE, self._remote, **kwargs) async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" diff --git a/homeassistant/components/switcher_kis/entity.py b/homeassistant/components/switcher_kis/entity.py index e24f59a4a1c..82b892d548d 100644 --- a/homeassistant/components/switcher_kis/entity.py +++ b/homeassistant/components/switcher_kis/entity.py @@ -3,7 +3,8 @@ import logging from typing import Any -from aioswitcher.api import SwitcherApi, SwitcherBaseResponse +from aioswitcher.api import SwitcherApi +from aioswitcher.api.messages import SwitcherBaseResponse from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr @@ -27,7 +28,7 @@ class SwitcherEntity(CoordinatorEntity[SwitcherDataUpdateCoordinator]): connections={(dr.CONNECTION_NETWORK_MAC, coordinator.mac_address)} ) - async def _async_call_api(self, api: str, *args: Any) -> None: + async def _async_call_api(self, api: str, *args: Any, **kwargs: Any) -> None: """Call Switcher API.""" _LOGGER.debug("Calling api for %s, api: '%s', args: %s", self.name, api, args) response: SwitcherBaseResponse | None = None @@ -41,7 +42,7 @@ class SwitcherEntity(CoordinatorEntity[SwitcherDataUpdateCoordinator]): 
self.coordinator.data.device_key, self.coordinator.token, ) as swapi: - response = await getattr(swapi, api)(*args) + response = await getattr(swapi, api)(*args, **kwargs) except (TimeoutError, OSError, RuntimeError) as err: error = repr(err) From b7ff27122ad711316551a8d7ac07e62c41e0d7eb Mon Sep 17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Wed, 18 Dec 2024 13:47:41 -0500 Subject: [PATCH 442/677] Add support for Nice G.O. HAE00080 wall station (#133186) --- homeassistant/components/nice_go/const.py | 4 ++-- homeassistant/components/nice_go/cover.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/nice_go/const.py b/homeassistant/components/nice_go/const.py index a6635368f7b..c02bcb3c234 100644 --- a/homeassistant/components/nice_go/const.py +++ b/homeassistant/components/nice_go/const.py @@ -15,8 +15,8 @@ CONF_REFRESH_TOKEN_CREATION_TIME = "refresh_token_creation_time" REFRESH_TOKEN_EXPIRY_TIME = timedelta(days=30) SUPPORTED_DEVICE_TYPES = { - Platform.LIGHT: ["WallStation"], - Platform.SWITCH: ["WallStation"], + Platform.LIGHT: ["WallStation", "WallStation_ESP32"], + Platform.SWITCH: ["WallStation", "WallStation_ESP32"], } KNOWN_UNSUPPORTED_DEVICE_TYPES = { Platform.LIGHT: ["Mms100"], diff --git a/homeassistant/components/nice_go/cover.py b/homeassistant/components/nice_go/cover.py index a823e931804..6360e398b96 100644 --- a/homeassistant/components/nice_go/cover.py +++ b/homeassistant/components/nice_go/cover.py @@ -21,6 +21,7 @@ from .entity import NiceGOEntity DEVICE_CLASSES = { "WallStation": CoverDeviceClass.GARAGE, "Mms100": CoverDeviceClass.GATE, + "WallStation_ESP32": CoverDeviceClass.GARAGE, } PARALLEL_UPDATES = 1 From c8f050ecbcd60712da41b07ebae3ff8af43b4b1f Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Wed, 18 Dec 2024 20:08:57 +0100 Subject: [PATCH 443/677] Fix the local_file.update_file_path action's name and description (#133509) --- 
homeassistant/components/local_file/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/local_file/strings.json b/homeassistant/components/local_file/strings.json index abf31a6f94e..393cc5f2e46 100644 --- a/homeassistant/components/local_file/strings.json +++ b/homeassistant/components/local_file/strings.json @@ -39,8 +39,8 @@ }, "services": { "update_file_path": { - "name": "Updates file path", - "description": "Use this action to change the file displayed by the camera.", + "name": "Update file path", + "description": "Changes the file displayed by the camera.", "fields": { "file_path": { "name": "File path", From 19e6867f1ae078d84ff3e1973d703c4b86504b89 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Wed, 18 Dec 2024 20:22:33 +0100 Subject: [PATCH 444/677] Reolink translate errors (#132301) --- homeassistant/components/reolink/__init__.py | 4 +- homeassistant/components/reolink/button.py | 26 ++-- homeassistant/components/reolink/camera.py | 14 +-- homeassistant/components/reolink/light.py | 47 +++---- homeassistant/components/reolink/number.py | 28 ++--- .../components/reolink/quality_scale.yaml | 2 +- homeassistant/components/reolink/select.py | 20 +-- homeassistant/components/reolink/services.py | 13 +- homeassistant/components/reolink/siren.py | 26 +--- homeassistant/components/reolink/strings.json | 39 ++++++ homeassistant/components/reolink/switch.py | 40 ++---- homeassistant/components/reolink/update.py | 5 +- homeassistant/components/reolink/util.py | 104 ++++++++++++++++ tests/components/reolink/test_util.py | 115 ++++++++++++++++++ 14 files changed, 331 insertions(+), 152 deletions(-) create mode 100644 tests/components/reolink/test_util.py diff --git a/homeassistant/components/reolink/__init__.py b/homeassistant/components/reolink/__init__.py index ae0badb3d84..29dfb4ee57b 100644 --- a/homeassistant/components/reolink/__init__.py +++ b/homeassistant/components/reolink/__init__.py @@ -73,7 +73,9 
@@ async def async_setup_entry( ) as err: await host.stop() raise ConfigEntryNotReady( - f"Error while trying to setup {host.api.host}:{host.api.port}: {err!s}" + translation_domain=DOMAIN, + translation_key="config_entry_not_ready", + translation_placeholders={"host": host.api.host, "err": str(err)}, ) from err except BaseException: await host.stop() diff --git a/homeassistant/components/reolink/button.py b/homeassistant/components/reolink/button.py index cd1e1b05fae..6b1fcc65a2f 100644 --- a/homeassistant/components/reolink/button.py +++ b/homeassistant/components/reolink/button.py @@ -7,7 +7,6 @@ from dataclasses import dataclass from typing import Any from reolink_aio.api import GuardEnum, Host, PtzEnum -from reolink_aio.exceptions import ReolinkError import voluptuous as vol from homeassistant.components.button import ( @@ -18,7 +17,6 @@ from homeassistant.components.button import ( from homeassistant.components.camera import CameraEntityFeature from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import ( AddEntitiesCallback, @@ -31,7 +29,7 @@ from .entity import ( ReolinkHostCoordinatorEntity, ReolinkHostEntityDescription, ) -from .util import ReolinkConfigEntry, ReolinkData +from .util import ReolinkConfigEntry, ReolinkData, raise_translated_error PARALLEL_UPDATES = 0 ATTR_SPEED = "speed" @@ -205,22 +203,18 @@ class ReolinkButtonEntity(ReolinkChannelCoordinatorEntity, ButtonEntity): ): self._attr_supported_features = SUPPORT_PTZ_SPEED + @raise_translated_error async def async_press(self) -> None: """Execute the button action.""" - try: - await self.entity_description.method(self._host.api, self._channel) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._host.api, self._channel) + 
@raise_translated_error async def async_ptz_move(self, **kwargs: Any) -> None: """PTZ move with speed.""" speed = kwargs[ATTR_SPEED] - try: - await self._host.api.set_ptz_command( - self._channel, command=self.entity_description.ptz_cmd, speed=speed - ) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self._host.api.set_ptz_command( + self._channel, command=self.entity_description.ptz_cmd, speed=speed + ) class ReolinkHostButtonEntity(ReolinkHostCoordinatorEntity, ButtonEntity): @@ -237,9 +231,7 @@ class ReolinkHostButtonEntity(ReolinkHostCoordinatorEntity, ButtonEntity): self.entity_description = entity_description super().__init__(reolink_data) + @raise_translated_error async def async_press(self) -> None: """Execute the button action.""" - try: - await self.entity_description.method(self._host.api) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._host.api) diff --git a/homeassistant/components/reolink/camera.py b/homeassistant/components/reolink/camera.py index 26ef0b0f4fc..d9b3cb67f70 100644 --- a/homeassistant/components/reolink/camera.py +++ b/homeassistant/components/reolink/camera.py @@ -6,7 +6,6 @@ from dataclasses import dataclass import logging from reolink_aio.api import DUAL_LENS_MODELS -from reolink_aio.exceptions import ReolinkError from homeassistant.components.camera import ( Camera, @@ -14,11 +13,10 @@ from homeassistant.components.camera import ( CameraEntityFeature, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription -from .util import ReolinkConfigEntry, ReolinkData +from .util import ReolinkConfigEntry, ReolinkData, raise_translated_error _LOGGER = logging.getLogger(__name__) PARALLEL_UPDATES = 0 @@ -142,13 +140,11 @@ class 
ReolinkCamera(ReolinkChannelCoordinatorEntity, Camera): self._channel, self.entity_description.stream ) + @raise_translated_error async def async_camera_image( self, width: int | None = None, height: int | None = None ) -> bytes | None: """Return a still image response from the camera.""" - try: - return await self._host.api.get_snapshot( - self._channel, self.entity_description.stream - ) - except ReolinkError as err: - raise HomeAssistantError(err) from err + return await self._host.api.get_snapshot( + self._channel, self.entity_description.stream + ) diff --git a/homeassistant/components/reolink/light.py b/homeassistant/components/reolink/light.py index 3bd9a120798..bbb9592dd76 100644 --- a/homeassistant/components/reolink/light.py +++ b/homeassistant/components/reolink/light.py @@ -7,7 +7,6 @@ from dataclasses import dataclass from typing import Any from reolink_aio.api import Host -from reolink_aio.exceptions import InvalidParameterError, ReolinkError from homeassistant.components.light import ( ATTR_BRIGHTNESS, @@ -17,7 +16,6 @@ from homeassistant.components.light import ( ) from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .entity import ( @@ -26,7 +24,7 @@ from .entity import ( ReolinkHostCoordinatorEntity, ReolinkHostEntityDescription, ) -from .util import ReolinkConfigEntry, ReolinkData +from .util import ReolinkConfigEntry, ReolinkData, raise_translated_error PARALLEL_UPDATES = 0 @@ -154,37 +152,28 @@ class ReolinkLightEntity(ReolinkChannelCoordinatorEntity, LightEntity): return round(255 * bright_pct / 100.0) + @raise_translated_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn light off.""" - try: - await self.entity_description.turn_on_off_fn( - self._host.api, self._channel, False - ) - except ReolinkError as err: - raise 
HomeAssistantError(err) from err + await self.entity_description.turn_on_off_fn( + self._host.api, self._channel, False + ) self.async_write_ha_state() + @raise_translated_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn light on.""" if ( brightness := kwargs.get(ATTR_BRIGHTNESS) ) is not None and self.entity_description.set_brightness_fn is not None: brightness_pct = int(brightness / 255.0 * 100) - try: - await self.entity_description.set_brightness_fn( - self._host.api, self._channel, brightness_pct - ) - except InvalidParameterError as err: - raise ServiceValidationError(err) from err - except ReolinkError as err: - raise HomeAssistantError(err) from err - - try: - await self.entity_description.turn_on_off_fn( - self._host.api, self._channel, True + await self.entity_description.set_brightness_fn( + self._host.api, self._channel, brightness_pct ) - except ReolinkError as err: - raise HomeAssistantError(err) from err + + await self.entity_description.turn_on_off_fn( + self._host.api, self._channel, True + ) self.async_write_ha_state() @@ -209,18 +198,14 @@ class ReolinkHostLightEntity(ReolinkHostCoordinatorEntity, LightEntity): """Return true if light is on.""" return self.entity_description.is_on_fn(self._host.api) + @raise_translated_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn light off.""" - try: - await self.entity_description.turn_on_off_fn(self._host.api, False) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.turn_on_off_fn(self._host.api, False) self.async_write_ha_state() + @raise_translated_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn light on.""" - try: - await self.entity_description.turn_on_off_fn(self._host.api, True) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.turn_on_off_fn(self._host.api, True) self.async_write_ha_state() diff --git 
a/homeassistant/components/reolink/number.py b/homeassistant/components/reolink/number.py index 692b43bca9e..e4b52c85d45 100644 --- a/homeassistant/components/reolink/number.py +++ b/homeassistant/components/reolink/number.py @@ -7,7 +7,6 @@ from dataclasses import dataclass from typing import Any from reolink_aio.api import Chime, Host -from reolink_aio.exceptions import InvalidParameterError, ReolinkError from homeassistant.components.number import ( NumberEntity, @@ -16,7 +15,6 @@ from homeassistant.components.number import ( ) from homeassistant.const import EntityCategory, UnitOfTime from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .entity import ( @@ -27,7 +25,7 @@ from .entity import ( ReolinkHostCoordinatorEntity, ReolinkHostEntityDescription, ) -from .util import ReolinkConfigEntry, ReolinkData +from .util import ReolinkConfigEntry, ReolinkData, raise_translated_error PARALLEL_UPDATES = 0 @@ -589,14 +587,10 @@ class ReolinkNumberEntity(ReolinkChannelCoordinatorEntity, NumberEntity): """State of the number entity.""" return self.entity_description.value(self._host.api, self._channel) + @raise_translated_error async def async_set_native_value(self, value: float) -> None: """Update the current value.""" - try: - await self.entity_description.method(self._host.api, self._channel, value) - except InvalidParameterError as err: - raise ServiceValidationError(err) from err - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._host.api, self._channel, value) self.async_write_ha_state() @@ -621,14 +615,10 @@ class ReolinkHostNumberEntity(ReolinkHostCoordinatorEntity, NumberEntity): """State of the number entity.""" return self.entity_description.value(self._host.api) + @raise_translated_error async def async_set_native_value(self, value: float) -> None: 
"""Update the current value.""" - try: - await self.entity_description.method(self._host.api, value) - except InvalidParameterError as err: - raise ServiceValidationError(err) from err - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._host.api, value) self.async_write_ha_state() @@ -654,12 +644,8 @@ class ReolinkChimeNumberEntity(ReolinkChimeCoordinatorEntity, NumberEntity): """State of the number entity.""" return self.entity_description.value(self._chime) + @raise_translated_error async def async_set_native_value(self, value: float) -> None: """Update the current value.""" - try: - await self.entity_description.method(self._chime, value) - except InvalidParameterError as err: - raise ServiceValidationError(err) from err - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._chime, value) self.async_write_ha_state() diff --git a/homeassistant/components/reolink/quality_scale.yaml b/homeassistant/components/reolink/quality_scale.yaml index 540cf19e22a..5cc054b7a4c 100644 --- a/homeassistant/components/reolink/quality_scale.yaml +++ b/homeassistant/components/reolink/quality_scale.yaml @@ -54,7 +54,7 @@ rules: entity-device-class: done entity-disabled-by-default: done entity-translations: done - exception-translations: todo + exception-translations: done icon-translations: done reconfiguration-flow: done repair-issues: done diff --git a/homeassistant/components/reolink/select.py b/homeassistant/components/reolink/select.py index 8625f7fb600..7a74be2e28c 100644 --- a/homeassistant/components/reolink/select.py +++ b/homeassistant/components/reolink/select.py @@ -19,12 +19,10 @@ from reolink_aio.api import ( StatusLedEnum, TrackMethodEnum, ) -from reolink_aio.exceptions import InvalidParameterError, ReolinkError from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.const import EntityCategory, 
UnitOfDataRate, UnitOfFrequency from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .entity import ( @@ -33,7 +31,7 @@ from .entity import ( ReolinkChimeCoordinatorEntity, ReolinkChimeEntityDescription, ) -from .util import ReolinkConfigEntry, ReolinkData +from .util import ReolinkConfigEntry, ReolinkData, raise_translated_error _LOGGER = logging.getLogger(__name__) PARALLEL_UPDATES = 0 @@ -354,14 +352,10 @@ class ReolinkSelectEntity(ReolinkChannelCoordinatorEntity, SelectEntity): self._log_error = True return option + @raise_translated_error async def async_select_option(self, option: str) -> None: """Change the selected option.""" - try: - await self.entity_description.method(self._host.api, self._channel, option) - except InvalidParameterError as err: - raise ServiceValidationError(err) from err - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._host.api, self._channel, option) self.async_write_ha_state() @@ -396,12 +390,8 @@ class ReolinkChimeSelectEntity(ReolinkChimeCoordinatorEntity, SelectEntity): self._log_error = True return option + @raise_translated_error async def async_select_option(self, option: str) -> None: """Change the selected option.""" - try: - await self.entity_description.method(self._chime, option) - except InvalidParameterError as err: - raise ServiceValidationError(err) from err - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._chime, option) self.async_write_ha_state() diff --git a/homeassistant/components/reolink/services.py b/homeassistant/components/reolink/services.py index 326093e7a93..acd31fe0d7d 100644 --- a/homeassistant/components/reolink/services.py +++ b/homeassistant/components/reolink/services.py @@ -4,18 +4,17 @@ from __future__ import annotations 
from reolink_aio.api import Chime from reolink_aio.enums import ChimeToneEnum -from reolink_aio.exceptions import InvalidParameterError, ReolinkError import voluptuous as vol from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_DEVICE_ID from homeassistant.core import HomeAssistant, ServiceCall, callback -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import device_registry as dr from .const import DOMAIN from .host import ReolinkHost -from .util import get_device_uid_and_ch +from .util import get_device_uid_and_ch, raise_translated_error ATTR_RINGTONE = "ringtone" @@ -24,6 +23,7 @@ ATTR_RINGTONE = "ringtone" def async_setup_services(hass: HomeAssistant) -> None: """Set up Reolink services.""" + @raise_translated_error async def async_play_chime(service_call: ServiceCall) -> None: """Play a ringtone.""" service_data = service_call.data @@ -58,12 +58,7 @@ def async_setup_services(hass: HomeAssistant) -> None: ) ringtone = service_data[ATTR_RINGTONE] - try: - await chime.play(ChimeToneEnum[ringtone].value) - except InvalidParameterError as err: - raise ServiceValidationError(err) from err - except ReolinkError as err: - raise HomeAssistantError(err) from err + await chime.play(ChimeToneEnum[ringtone].value) hass.services.async_register( DOMAIN, diff --git a/homeassistant/components/reolink/siren.py b/homeassistant/components/reolink/siren.py index cb12eb5d38c..74bb227d078 100644 --- a/homeassistant/components/reolink/siren.py +++ b/homeassistant/components/reolink/siren.py @@ -5,8 +5,6 @@ from __future__ import annotations from dataclasses import dataclass from typing import Any -from reolink_aio.exceptions import InvalidParameterError, ReolinkError - from homeassistant.components.siren import ( ATTR_DURATION, ATTR_VOLUME_LEVEL, @@ -15,11 +13,10 @@ from homeassistant.components.siren import ( 
SirenEntityFeature, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription -from .util import ReolinkConfigEntry, ReolinkData +from .util import ReolinkConfigEntry, ReolinkData, raise_translated_error PARALLEL_UPDATES = 0 @@ -77,26 +74,15 @@ class ReolinkSirenEntity(ReolinkChannelCoordinatorEntity, SirenEntity): self.entity_description = entity_description super().__init__(reolink_data, channel) + @raise_translated_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the siren.""" if (volume := kwargs.get(ATTR_VOLUME_LEVEL)) is not None: - try: - await self._host.api.set_volume(self._channel, int(volume * 100)) - except InvalidParameterError as err: - raise ServiceValidationError(err) from err - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self._host.api.set_volume(self._channel, int(volume * 100)) duration = kwargs.get(ATTR_DURATION) - try: - await self._host.api.set_siren(self._channel, True, duration) - except InvalidParameterError as err: - raise ServiceValidationError(err) from err - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self._host.api.set_siren(self._channel, True, duration) + @raise_translated_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the siren.""" - try: - await self._host.api.set_siren(self._channel, False, None) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self._host.api.set_siren(self._channel, False, None) diff --git a/homeassistant/components/reolink/strings.json b/homeassistant/components/reolink/strings.json index ac73581ce22..53152131bdb 100644 --- a/homeassistant/components/reolink/strings.json +++ b/homeassistant/components/reolink/strings.json @@ -55,6 +55,45 @@ }, 
"service_not_chime": { "message": "Reolink play_chime error: {device_name} is not a chime" + }, + "invalid_parameter": { + "message": "Invalid input parameter: {err}" + }, + "api_error": { + "message": "The device responded with a error: {err}" + }, + "invalid_content_type": { + "message": "Received a different content type than expected: {err}" + }, + "invalid_credentials": { + "message": "Invalid credentials: {err}" + }, + "login_error": { + "message": "Error during login attempt: {err}" + }, + "no_data": { + "message": "Device returned no data: {err}" + }, + "unexpected_data": { + "message": "Device returned unexpected data: {err}" + }, + "not_supported": { + "message": "Function not supported by this device: {err}" + }, + "subscription_error": { + "message": "Error during ONVIF subscription: {err}" + }, + "connection_error": { + "message": "Could not connect to the device: {err}" + }, + "timeout": { + "message": "Timeout waiting on a response: {err}" + }, + "firmware_install_error": { + "message": "Error trying to update Reolink firmware: {err}" + }, + "config_entry_not_ready": { + "message": "Error while trying to setup {host}: {err}" } }, "issues": { diff --git a/homeassistant/components/reolink/switch.py b/homeassistant/components/reolink/switch.py index c274609599d..b970d04c257 100644 --- a/homeassistant/components/reolink/switch.py +++ b/homeassistant/components/reolink/switch.py @@ -7,12 +7,10 @@ from dataclasses import dataclass from typing import Any from reolink_aio.api import Chime, Host -from reolink_aio.exceptions import ReolinkError from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -25,7 +23,7 @@ from .entity import ( 
ReolinkHostCoordinatorEntity, ReolinkHostEntityDescription, ) -from .util import ReolinkConfigEntry, ReolinkData +from .util import ReolinkConfigEntry, ReolinkData, raise_translated_error PARALLEL_UPDATES = 0 @@ -430,20 +428,16 @@ class ReolinkSwitchEntity(ReolinkChannelCoordinatorEntity, SwitchEntity): """Return true if switch is on.""" return self.entity_description.value(self._host.api, self._channel) + @raise_translated_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" - try: - await self.entity_description.method(self._host.api, self._channel, True) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._host.api, self._channel, True) self.async_write_ha_state() + @raise_translated_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" - try: - await self.entity_description.method(self._host.api, self._channel, False) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._host.api, self._channel, False) self.async_write_ha_state() @@ -466,20 +460,16 @@ class ReolinkNVRSwitchEntity(ReolinkHostCoordinatorEntity, SwitchEntity): """Return true if switch is on.""" return self.entity_description.value(self._host.api) + @raise_translated_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" - try: - await self.entity_description.method(self._host.api, True) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._host.api, True) self.async_write_ha_state() + @raise_translated_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" - try: - await self.entity_description.method(self._host.api, False) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._host.api, False) self.async_write_ha_state() @@ 
-503,18 +493,14 @@ class ReolinkChimeSwitchEntity(ReolinkChimeCoordinatorEntity, SwitchEntity): """Return true if switch is on.""" return self.entity_description.value(self._chime) + @raise_translated_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" - try: - await self.entity_description.method(self._chime, True) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._chime, True) self.async_write_ha_state() + @raise_translated_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" - try: - await self.entity_description.method(self._chime, False) - except ReolinkError as err: - raise HomeAssistantError(err) from err + await self.entity_description.method(self._chime, False) self.async_write_ha_state() diff --git a/homeassistant/components/reolink/update.py b/homeassistant/components/reolink/update.py index aa607e2b29e..5a8c7d7dc08 100644 --- a/homeassistant/components/reolink/update.py +++ b/homeassistant/components/reolink/update.py @@ -24,6 +24,7 @@ from homeassistant.helpers.update_coordinator import ( ) from . 
import DEVICE_UPDATE_INTERVAL +from .const import DOMAIN from .entity import ( ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription, @@ -196,7 +197,9 @@ class ReolinkUpdateBaseEntity( await self._host.api.update_firmware(self._channel) except ReolinkError as err: raise HomeAssistantError( - f"Error trying to update Reolink firmware: {err}" + translation_domain=DOMAIN, + translation_key="firmware_install_error", + translation_placeholders={"err": str(err)}, ) from err finally: self.async_write_ha_state() diff --git a/homeassistant/components/reolink/util.py b/homeassistant/components/reolink/util.py index 98c0e7b925b..1a6eab3f61d 100644 --- a/homeassistant/components/reolink/util.py +++ b/homeassistant/components/reolink/util.py @@ -2,10 +2,28 @@ from __future__ import annotations +from collections.abc import Awaitable, Callable, Coroutine from dataclasses import dataclass +from typing import Any, ParamSpec, TypeVar + +from reolink_aio.exceptions import ( + ApiError, + CredentialsInvalidError, + InvalidContentTypeError, + InvalidParameterError, + LoginError, + NoDataError, + NotSupportedError, + ReolinkConnectionError, + ReolinkError, + ReolinkTimeoutError, + SubscriptionError, + UnexpectedDataError, +) from homeassistant import config_entries from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import device_registry as dr from homeassistant.helpers.update_coordinator import DataUpdateCoordinator @@ -53,3 +71,89 @@ def get_device_uid_and_ch( else: ch = host.api.channel_for_uid(device_uid[1]) return (device_uid, ch, is_chime) + + +T = TypeVar("T") +P = ParamSpec("P") + + +# Decorators +def raise_translated_error( + func: Callable[P, Awaitable[T]], +) -> Callable[P, Coroutine[Any, Any, T]]: + """Wrap a reolink-aio function to translate any potential errors.""" + + async def decorator_raise_translated_error(*args: P.args, **kwargs: P.kwargs) -> T: + """Try a 
reolink-aio function and translate any potential errors.""" + try: + return await func(*args, **kwargs) + except InvalidParameterError as err: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_parameter", + translation_placeholders={"err": str(err)}, + ) from err + except ApiError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="api_error", + translation_placeholders={"err": str(err)}, + ) from err + except InvalidContentTypeError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="invalid_content_type", + translation_placeholders={"err": str(err)}, + ) from err + except CredentialsInvalidError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="invalid_credentials", + translation_placeholders={"err": str(err)}, + ) from err + except LoginError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="login_error", + translation_placeholders={"err": str(err)}, + ) from err + except NoDataError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="no_data", + translation_placeholders={"err": str(err)}, + ) from err + except UnexpectedDataError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="unexpected_data", + translation_placeholders={"err": str(err)}, + ) from err + except NotSupportedError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="not_supported", + translation_placeholders={"err": str(err)}, + ) from err + except SubscriptionError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="subscription_error", + translation_placeholders={"err": str(err)}, + ) from err + except ReolinkConnectionError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="connection_error", + translation_placeholders={"err": str(err)}, + ) from err + except 
ReolinkTimeoutError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="timeout", + translation_placeholders={"err": str(err)}, + ) from err + except ReolinkError as err: + raise HomeAssistantError(err) from err + + return decorator_raise_translated_error diff --git a/tests/components/reolink/test_util.py b/tests/components/reolink/test_util.py new file mode 100644 index 00000000000..f66f4682b98 --- /dev/null +++ b/tests/components/reolink/test_util.py @@ -0,0 +1,115 @@ +"""Test the Reolink util functions.""" + +from unittest.mock import MagicMock, patch + +import pytest +from reolink_aio.exceptions import ( + ApiError, + CredentialsInvalidError, + InvalidContentTypeError, + InvalidParameterError, + LoginError, + NoDataError, + NotSupportedError, + ReolinkConnectionError, + ReolinkError, + ReolinkTimeoutError, + SubscriptionError, + UnexpectedDataError, +) + +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError + +from .conftest import TEST_NVR_NAME + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize( + ("side_effect", "expected"), + [ + ( + ApiError("Test error"), + HomeAssistantError, + ), + ( + CredentialsInvalidError("Test error"), + HomeAssistantError, + ), + ( + InvalidContentTypeError("Test error"), + HomeAssistantError, + ), + ( + InvalidParameterError("Test error"), + ServiceValidationError, + ), + ( + LoginError("Test error"), + HomeAssistantError, + ), + ( + NoDataError("Test error"), + HomeAssistantError, + ), + ( + NotSupportedError("Test error"), + HomeAssistantError, + ), + ( + ReolinkConnectionError("Test error"), + HomeAssistantError, + ), + ( + ReolinkError("Test error"), + HomeAssistantError, + ), 
+ ( + ReolinkTimeoutError("Test error"), + HomeAssistantError, + ), + ( + SubscriptionError("Test error"), + HomeAssistantError, + ), + ( + UnexpectedDataError("Test error"), + HomeAssistantError, + ), + ], +) +async def test_try_function( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + side_effect: ReolinkError, + expected: Exception, +) -> None: + """Test raise_translated_error error translations using a number entity.""" + reolink_connect.volume.return_value = 80 + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.NUMBER]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.NUMBER}.{TEST_NVR_NAME}_volume" + + reolink_connect.set_volume.side_effect = side_effect + with pytest.raises(expected): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 50}, + blocking=True, + ) + + reolink_connect.set_volume.reset_mock(side_effect=True) From 8a8be71f96f57cc67f43d9335d6e33af5bc9c96a Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Wed, 18 Dec 2024 20:53:05 +0100 Subject: [PATCH 445/677] Add tests for cover and increase test coverage for slide_local (#133515) --- .../components/slide_local/quality_scale.yaml | 2 +- .../slide_local/snapshots/test_cover.ambr | 51 +++++ tests/components/slide_local/test_cover.py | 215 ++++++++++++++++++ tests/components/slide_local/test_init.py | 36 +++ 4 files changed, 303 insertions(+), 1 deletion(-) create mode 100644 tests/components/slide_local/snapshots/test_cover.ambr create mode 100644 tests/components/slide_local/test_cover.py diff --git a/homeassistant/components/slide_local/quality_scale.yaml b/homeassistant/components/slide_local/quality_scale.yaml index 4833f19e2b2..c3ce12efd80 100644 --- 
a/homeassistant/components/slide_local/quality_scale.yaml +++ b/homeassistant/components/slide_local/quality_scale.yaml @@ -28,7 +28,7 @@ rules: action-exceptions: done reauthentication-flow: todo parallel-updates: done - test-coverage: todo + test-coverage: done integration-owner: done docs-installation-parameters: done docs-configuration-parameters: done diff --git a/tests/components/slide_local/snapshots/test_cover.ambr b/tests/components/slide_local/snapshots/test_cover.ambr new file mode 100644 index 00000000000..d9283618a47 --- /dev/null +++ b/tests/components/slide_local/snapshots/test_cover.ambr @@ -0,0 +1,51 @@ +# serializer version: 1 +# name: test_all_entities[cover.slide_bedroom-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.slide_bedroom', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'slide_local', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '1234567890ab', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[cover.slide_bedroom-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assumed_state': True, + 'current_position': 100, + 'device_class': 'curtain', + 'friendly_name': 'slide bedroom', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.slide_bedroom', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- diff --git a/tests/components/slide_local/test_cover.py b/tests/components/slide_local/test_cover.py new file mode 100644 index 00000000000..e0e4a0741d8 --- /dev/null +++ b/tests/components/slide_local/test_cover.py 
@@ -0,0 +1,215 @@ +"""Tests for the Slide Local cover platform.""" + +from datetime import timedelta +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from goslideapi.goslideapi import ClientConnectionError +from syrupy import SnapshotAssertion + +from homeassistant.components.cover import ( + ATTR_POSITION, + DOMAIN as COVER_DOMAIN, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, + SERVICE_SET_COVER_POSITION, + SERVICE_STOP_COVER, + CoverState, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_platform +from .const import SLIDE_INFO_DATA + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_connection_error( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test connection error.""" + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + + mock_slide_api.slide_info.side_effect = [ClientConnectionError, SLIDE_INFO_DATA] + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("cover.slide_bedroom").state == STATE_UNAVAILABLE + + freezer.tick(delta=timedelta(minutes=2)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("cover.slide_bedroom").state == CoverState.OPEN + + +async def test_state_change( + hass: 
HomeAssistant, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test connection error.""" + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + + mock_slide_api.slide_info.side_effect = [ + dict(SLIDE_INFO_DATA, pos=0.0), + dict(SLIDE_INFO_DATA, pos=0.4), + dict(SLIDE_INFO_DATA, pos=1.0), + dict(SLIDE_INFO_DATA, pos=0.8), + ] + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("cover.slide_bedroom").state == CoverState.OPEN + + freezer.tick(delta=timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("cover.slide_bedroom").state == CoverState.CLOSING + + freezer.tick(delta=timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("cover.slide_bedroom").state == CoverState.CLOSED + + freezer.tick(delta=timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("cover.slide_bedroom").state == CoverState.OPENING + + +async def test_open_cover( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test open cover.""" + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + { + ATTR_ENTITY_ID: "cover.slide_bedroom", + }, + blocking=True, + ) + mock_slide_api.slide_open.assert_called_once() + + +async def test_close_cover( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test close cover.""" + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + { + ATTR_ENTITY_ID: "cover.slide_bedroom", + }, + blocking=True, + ) + 
mock_slide_api.slide_close.assert_called_once() + + +async def test_stop_cover( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test stop cover.""" + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_STOP_COVER, + { + ATTR_ENTITY_ID: "cover.slide_bedroom", + }, + blocking=True, + ) + mock_slide_api.slide_stop.assert_called_once() + + +async def test_set_position( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test set cover position.""" + + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + + mock_slide_api.slide_info.side_effect = [ + dict(SLIDE_INFO_DATA, pos=0.0), + dict(SLIDE_INFO_DATA, pos=1.0), + dict(SLIDE_INFO_DATA, pos=1.0), + dict(SLIDE_INFO_DATA, pos=0.0), + ] + + freezer.tick(delta=timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_SET_COVER_POSITION, + {ATTR_ENTITY_ID: "cover.slide_bedroom", ATTR_POSITION: 1.0}, + blocking=True, + ) + + freezer.tick(delta=timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("cover.slide_bedroom").state == CoverState.CLOSED + + freezer.tick(delta=timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_SET_COVER_POSITION, + {ATTR_ENTITY_ID: "cover.slide_bedroom", ATTR_POSITION: 0.0}, + blocking=True, + ) + + freezer.tick(delta=timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("cover.slide_bedroom").state == CoverState.OPEN + + assert len(mock_slide_api.slide_set_position.mock_calls) == 2 diff --git a/tests/components/slide_local/test_init.py 
b/tests/components/slide_local/test_init.py index 7b0a2d83164..ec9a12f9eeb 100644 --- a/tests/components/slide_local/test_init.py +++ b/tests/components/slide_local/test_init.py @@ -2,8 +2,10 @@ from unittest.mock import AsyncMock +from goslideapi.goslideapi import ClientConnectionError from syrupy import SnapshotAssertion +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -27,3 +29,37 @@ async def test_device_info( ) assert device_entry is not None assert device_entry == snapshot + + +async def test_raise_config_entry_not_ready_when_offline( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_slide_api: AsyncMock, +) -> None: + """Config entry state is SETUP_RETRY when slide is offline.""" + + mock_slide_api.slide_info.side_effect = [ClientConnectionError, None] + + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + assert len(hass.config_entries.flow.async_progress()) == 0 + + +async def test_raise_config_entry_not_ready_when_empty_data( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_slide_api: AsyncMock, +) -> None: + """Config entry state is SETUP_RETRY when slide is offline.""" + + mock_slide_api.slide_info.return_value = None + + await setup_platform(hass, mock_config_entry, [Platform.COVER]) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + assert len(hass.config_entries.flow.async_progress()) == 0 From ff8bc763c3f667039b3034d972c871e7439a6833 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 18 Dec 2024 21:29:52 +0100 Subject: [PATCH 446/677] Ensure indices needed by data migrators exist (#133367) * Ensure indices needed by data migrators exist * Update test * Improve test * Ignore index 
error on char(0) columns * Adjust tests * Address review comments * Add comment motivating magic number --- .../components/recorder/db_schema.py | 38 ++- .../components/recorder/migration.py | 223 ++++++++++++++---- tests/components/recorder/test_migrate.py | 8 +- ..._migration_run_time_migrations_remember.py | 203 +++++++++------- .../recorder/test_purge_v32_schema.py | 4 +- 5 files changed, 340 insertions(+), 136 deletions(-) diff --git a/homeassistant/components/recorder/db_schema.py b/homeassistant/components/recorder/db_schema.py index fb57a1c73e2..fa4162f4183 100644 --- a/homeassistant/components/recorder/db_schema.py +++ b/homeassistant/components/recorder/db_schema.py @@ -6,7 +6,7 @@ from collections.abc import Callable from datetime import datetime, timedelta import logging import time -from typing import Any, Self, cast +from typing import Any, Final, Self, cast import ciso8601 from fnv_hash_fast import fnv1a_32 @@ -130,7 +130,8 @@ METADATA_ID_LAST_UPDATED_INDEX_TS = "ix_states_metadata_id_last_updated_ts" EVENTS_CONTEXT_ID_BIN_INDEX = "ix_events_context_id_bin" STATES_CONTEXT_ID_BIN_INDEX = "ix_states_context_id_bin" LEGACY_STATES_EVENT_ID_INDEX = "ix_states_event_id" -LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX = "ix_states_entity_id_last_updated_ts" +LEGACY_STATES_ENTITY_ID_LAST_UPDATED_TS_INDEX = "ix_states_entity_id_last_updated_ts" +LEGACY_MAX_LENGTH_EVENT_CONTEXT_ID: Final = 36 CONTEXT_ID_BIN_MAX_LENGTH = 16 MYSQL_COLLATE = "utf8mb4_unicode_ci" @@ -350,6 +351,17 @@ class Events(Base): return None +class LegacyEvents(LegacyBase): + """Event history data with event_id, used for schema migration.""" + + __table_args__ = (_DEFAULT_TABLE_ARGS,) + __tablename__ = TABLE_EVENTS + event_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True) + context_id: Mapped[str | None] = mapped_column( + String(LEGACY_MAX_LENGTH_EVENT_CONTEXT_ID), index=True + ) + + class EventData(Base): """Event data history.""" @@ -575,6 +587,28 @@ class States(Base): 
) +class LegacyStates(LegacyBase): + """State change history with entity_id, used for schema migration.""" + + __table_args__ = ( + Index( + LEGACY_STATES_ENTITY_ID_LAST_UPDATED_TS_INDEX, + "entity_id", + "last_updated_ts", + ), + _DEFAULT_TABLE_ARGS, + ) + __tablename__ = TABLE_STATES + state_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True) + entity_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + last_updated_ts: Mapped[float | None] = mapped_column( + TIMESTAMP_TYPE, default=time.time, index=True + ) + context_id: Mapped[str | None] = mapped_column( + String(LEGACY_MAX_LENGTH_EVENT_CONTEXT_ID), index=True + ) + + class StateAttributes(Base): """State attribute change history.""" diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 74e3b08f51c..33790ec65b2 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -23,6 +23,7 @@ from sqlalchemy.exc import ( ProgrammingError, SQLAlchemyError, ) +from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm.session import Session from sqlalchemy.schema import AddConstraint, CreateTable, DropConstraint from sqlalchemy.sql.expression import true @@ -59,7 +60,7 @@ from .db_schema import ( BIG_INTEGER_SQL, CONTEXT_ID_BIN_MAX_LENGTH, DOUBLE_PRECISION_TYPE_SQL, - LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX, + LEGACY_STATES_ENTITY_ID_LAST_UPDATED_TS_INDEX, LEGACY_STATES_EVENT_ID_INDEX, MYSQL_COLLATE, MYSQL_DEFAULT_CHARSET, @@ -169,6 +170,24 @@ _COLUMN_TYPES_FOR_DIALECT: dict[SupportedDialect | None, _ColumnTypesForDialect] } +def _unindexable_legacy_column( + instance: Recorder, base: type[DeclarativeBase], err: Exception +) -> bool: + """Ignore index errors on char(0) columns.""" + # The error code is hard coded because the PyMySQL library may not be + # installed when using other database engines than MySQL or MariaDB. 
+ # 1167: The used storage engine can't index column '%s' + return bool( + base == LegacyBase + and isinstance(err, OperationalError) + and instance.engine + and instance.engine.dialect.name == SupportedDialect.MYSQL + and isinstance(err.orig, BaseException) + and err.orig.args + and err.orig.args[0] == 1167 + ) + + def raise_if_exception_missing_str(ex: Exception, match_substrs: Iterable[str]) -> None: """Raise if the exception and cause do not contain the match substrs.""" lower_ex_strs = [str(ex).lower(), str(ex.__cause__).lower()] @@ -471,14 +490,19 @@ def migrate_data_live( def _create_index( - session_maker: Callable[[], Session], table_name: str, index_name: str + instance: Recorder, + session_maker: Callable[[], Session], + table_name: str, + index_name: str, + *, + base: type[DeclarativeBase] = Base, ) -> None: """Create an index for the specified table. The index name should match the name given for the index within the table definition described in the models """ - table = Table(table_name, Base.metadata) + table = Table(table_name, base.metadata) _LOGGER.debug("Looking up index %s for table %s", index_name, table_name) # Look up the index object by name from the table is the models index_list = [idx for idx in table.indexes if idx.name == index_name] @@ -498,10 +522,18 @@ def _create_index( connection = session.connection() index.create(connection) except (InternalError, OperationalError, ProgrammingError) as err: + if _unindexable_legacy_column(instance, base, err): + _LOGGER.debug( + "Can't add legacy index %s to column %s, continuing", + index_name, + table_name, + ) + return raise_if_exception_missing_str(err, ["already exists", "duplicate"]) _LOGGER.warning( "Index %s already exists on %s, continuing", index_name, table_name ) + return _LOGGER.warning("Finished adding index `%s` to table `%s`", index_name, table_name) @@ -1040,7 +1072,12 @@ class _SchemaVersion2Migrator(_SchemaVersionMigrator, target_version=2): def _apply_update(self) -> None: 
"""Version specific update method.""" # Create compound start/end index for recorder_runs - _create_index(self.session_maker, "recorder_runs", "ix_recorder_runs_start_end") + _create_index( + self.instance, + self.session_maker, + "recorder_runs", + "ix_recorder_runs_start_end", + ) # This used to create ix_states_last_updated bit it was removed in version 32 @@ -1075,7 +1112,9 @@ class _SchemaVersion5Migrator(_SchemaVersionMigrator, target_version=5): def _apply_update(self) -> None: """Version specific update method.""" # Create supporting index for States.event_id foreign key - _create_index(self.session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX) + _create_index( + self.instance, self.session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX + ) class _SchemaVersion6Migrator(_SchemaVersionMigrator, target_version=6): @@ -1086,7 +1125,9 @@ class _SchemaVersion6Migrator(_SchemaVersionMigrator, target_version=6): "events", ["context_id CHARACTER(36)", "context_user_id CHARACTER(36)"], ) - _create_index(self.session_maker, "events", "ix_events_context_id") + _create_index( + self.instance, self.session_maker, "events", "ix_events_context_id" + ) # This used to create ix_events_context_user_id, # but it was removed in version 28 _add_columns( @@ -1094,7 +1135,9 @@ class _SchemaVersion6Migrator(_SchemaVersionMigrator, target_version=6): "states", ["context_id CHARACTER(36)", "context_user_id CHARACTER(36)"], ) - _create_index(self.session_maker, "states", "ix_states_context_id") + _create_index( + self.instance, self.session_maker, "states", "ix_states_context_id" + ) # This used to create ix_states_context_user_id, # but it was removed in version 28 @@ -1148,7 +1191,9 @@ class _SchemaVersion10Migrator(_SchemaVersionMigrator, target_version=10): class _SchemaVersion11Migrator(_SchemaVersionMigrator, target_version=11): def _apply_update(self) -> None: """Version specific update method.""" - _create_index(self.session_maker, "states", "ix_states_old_state_id") + 
_create_index( + self.instance, self.session_maker, "states", "ix_states_old_state_id" + ) # _update_states_table_with_foreign_key_options first drops foreign # key constraints, and then re-adds them with the correct settings. @@ -1390,13 +1435,20 @@ class _SchemaVersion25Migrator(_SchemaVersionMigrator, target_version=25): "states", [f"attributes_id {self.column_types.big_int_type}"], ) - _create_index(self.session_maker, "states", "ix_states_attributes_id") + _create_index( + self.instance, self.session_maker, "states", "ix_states_attributes_id" + ) class _SchemaVersion26Migrator(_SchemaVersionMigrator, target_version=26): def _apply_update(self) -> None: """Version specific update method.""" - _create_index(self.session_maker, "statistics_runs", "ix_statistics_runs_start") + _create_index( + self.instance, + self.session_maker, + "statistics_runs", + "ix_statistics_runs_start", + ) class _SchemaVersion27Migrator(_SchemaVersionMigrator, target_version=27): @@ -1405,7 +1457,7 @@ class _SchemaVersion27Migrator(_SchemaVersionMigrator, target_version=27): _add_columns( self.session_maker, "events", [f"data_id {self.column_types.big_int_type}"] ) - _create_index(self.session_maker, "events", "ix_events_data_id") + _create_index(self.instance, self.session_maker, "events", "ix_events_data_id") class _SchemaVersion28Migrator(_SchemaVersionMigrator, target_version=28): @@ -1425,7 +1477,9 @@ class _SchemaVersion28Migrator(_SchemaVersionMigrator, target_version=28): "context_parent_id VARCHAR(36)", ], ) - _create_index(self.session_maker, "states", "ix_states_context_id") + _create_index( + self.instance, self.session_maker, "states", "ix_states_context_id" + ) # Once there are no longer any state_changed events # in the events table we can drop the index on states.event_id @@ -1452,7 +1506,10 @@ class _SchemaVersion29Migrator(_SchemaVersionMigrator, target_version=29): ) try: _create_index( - self.session_maker, "statistics_meta", "ix_statistics_meta_statistic_id" + 
self.instance, + self.session_maker, + "statistics_meta", + "ix_statistics_meta_statistic_id", ) except DatabaseError: # There may be duplicated statistics_meta entries, delete duplicates @@ -1460,7 +1517,10 @@ class _SchemaVersion29Migrator(_SchemaVersionMigrator, target_version=29): with session_scope(session=self.session_maker()) as session: delete_statistics_meta_duplicates(self.instance, session) _create_index( - self.session_maker, "statistics_meta", "ix_statistics_meta_statistic_id" + self.instance, + self.session_maker, + "statistics_meta", + "ix_statistics_meta_statistic_id", ) @@ -1494,14 +1554,24 @@ class _SchemaVersion31Migrator(_SchemaVersionMigrator, target_version=31): f"last_changed_ts {self.column_types.timestamp_type}", ], ) - _create_index(self.session_maker, "events", "ix_events_time_fired_ts") _create_index( - self.session_maker, "events", "ix_events_event_type_time_fired_ts" + self.instance, self.session_maker, "events", "ix_events_time_fired_ts" ) _create_index( - self.session_maker, "states", "ix_states_entity_id_last_updated_ts" + self.instance, + self.session_maker, + "events", + "ix_events_event_type_time_fired_ts", + ) + _create_index( + self.instance, + self.session_maker, + "states", + "ix_states_entity_id_last_updated_ts", + ) + _create_index( + self.instance, self.session_maker, "states", "ix_states_last_updated_ts" ) - _create_index(self.session_maker, "states", "ix_states_last_updated_ts") _migrate_columns_to_timestamp(self.instance, self.session_maker, self.engine) @@ -1559,16 +1629,23 @@ class _SchemaVersion34Migrator(_SchemaVersionMigrator, target_version=34): f"last_reset_ts {self.column_types.timestamp_type}", ], ) - _create_index(self.session_maker, "statistics", "ix_statistics_start_ts") _create_index( - self.session_maker, "statistics", "ix_statistics_statistic_id_start_ts" + self.instance, self.session_maker, "statistics", "ix_statistics_start_ts" ) _create_index( + self.instance, + self.session_maker, + "statistics", + 
"ix_statistics_statistic_id_start_ts", + ) + _create_index( + self.instance, self.session_maker, "statistics_short_term", "ix_statistics_short_term_start_ts", ) _create_index( + self.instance, self.session_maker, "statistics_short_term", "ix_statistics_short_term_statistic_id_start_ts", @@ -1618,8 +1695,12 @@ class _SchemaVersion36Migrator(_SchemaVersionMigrator, target_version=36): f"context_parent_id_bin {self.column_types.context_bin_type}", ], ) - _create_index(self.session_maker, "events", "ix_events_context_id_bin") - _create_index(self.session_maker, "states", "ix_states_context_id_bin") + _create_index( + self.instance, self.session_maker, "events", "ix_events_context_id_bin" + ) + _create_index( + self.instance, self.session_maker, "states", "ix_states_context_id_bin" + ) class _SchemaVersion37Migrator(_SchemaVersionMigrator, target_version=37): @@ -1630,10 +1711,15 @@ class _SchemaVersion37Migrator(_SchemaVersionMigrator, target_version=37): "events", [f"event_type_id {self.column_types.big_int_type}"], ) - _create_index(self.session_maker, "events", "ix_events_event_type_id") + _create_index( + self.instance, self.session_maker, "events", "ix_events_event_type_id" + ) _drop_index(self.session_maker, "events", "ix_events_event_type_time_fired_ts") _create_index( - self.session_maker, "events", "ix_events_event_type_id_time_fired_ts" + self.instance, + self.session_maker, + "events", + "ix_events_event_type_id_time_fired_ts", ) @@ -1645,9 +1731,14 @@ class _SchemaVersion38Migrator(_SchemaVersionMigrator, target_version=38): "states", [f"metadata_id {self.column_types.big_int_type}"], ) - _create_index(self.session_maker, "states", "ix_states_metadata_id") _create_index( - self.session_maker, "states", "ix_states_metadata_id_last_updated_ts" + self.instance, self.session_maker, "states", "ix_states_metadata_id" + ) + _create_index( + self.instance, + self.session_maker, + "states", + "ix_states_metadata_id_last_updated_ts", ) @@ -1731,8 +1822,15 @@ class 
_SchemaVersion40Migrator(_SchemaVersionMigrator, target_version=40): class _SchemaVersion41Migrator(_SchemaVersionMigrator, target_version=41): def _apply_update(self) -> None: """Version specific update method.""" - _create_index(self.session_maker, "event_types", "ix_event_types_event_type") - _create_index(self.session_maker, "states_meta", "ix_states_meta_entity_id") + _create_index( + self.instance, + self.session_maker, + "event_types", + "ix_event_types_event_type", + ) + _create_index( + self.instance, self.session_maker, "states_meta", "ix_states_meta_entity_id" + ) class _SchemaVersion42Migrator(_SchemaVersionMigrator, target_version=42): @@ -2319,7 +2417,7 @@ class DataMigrationStatus: class BaseMigration(ABC): """Base class for migrations.""" - index_to_drop: tuple[str, str] | None = None + index_to_drop: tuple[str, str, type[DeclarativeBase]] | None = None required_schema_version = 0 # Schema version required to run migration queries max_initial_schema_version: int # Skip migration if db created after this version migration_version = 1 @@ -2349,12 +2447,12 @@ class BaseMigration(ABC): """Migrate some data, returns True if migration is completed.""" status = self.migrate_data_impl(instance) if status.migration_done: - if self.index_to_drop is not None: - table, index = self.index_to_drop - _drop_index(instance.get_session, table, index) with session_scope(session=instance.get_session()) as session: self.migration_done(instance, session) _mark_migration_done(session, self.__class__) + if self.index_to_drop is not None: + table, index, _ = self.index_to_drop + _drop_index(instance.get_session, table, index) return not status.needs_migrate @abstractmethod @@ -2393,25 +2491,31 @@ class BaseMigration(ABC): "Data migration '%s' needed, schema too old", self.migration_id ) return True + has_needed_index = self._has_needed_index(session) + if has_needed_index is True: + # The index to be removed by the migration still exists + _LOGGER.info( + "Data migration 
'%s' needed, index to drop still exists", + self.migration_id, + ) + return True if self.migration_changes.get(self.migration_id, -1) >= self.migration_version: # The migration changes table indicates that the migration has been done _LOGGER.debug( "Data migration '%s' not needed, already completed", self.migration_id ) return False - # We do not know if the migration is done from the - # migration changes table so we must check the index and data - # This is the slow path - if ( - self.index_to_drop is not None - and get_index_by_name(session, self.index_to_drop[0], self.index_to_drop[1]) - is not None - ): + if has_needed_index is False: + # The index to be removed by the migration does not exist, but the migration + # changes table indicates that the migration has not been done _LOGGER.info( - "Data migration '%s' needed, index to drop still exists", + "Data migration '%s' needed, index to drop does not exist", self.migration_id, ) return True + # We do not know if the migration is done from the + # migration changes table or the index so we must check the data + # This is the slow path needs_migrate = self.needs_migrate_impl(instance, session) if needs_migrate.migration_done: _mark_migration_done(session, self.__class__) @@ -2422,6 +2526,13 @@ class BaseMigration(ABC): ) return needs_migrate.needs_migrate + def _has_needed_index(self, session: Session) -> bool | None: + """Check if the index needed by the migration exists.""" + if self.index_to_drop is None: + return None + table_name, index_name, _ = self.index_to_drop + return get_index_by_name(session, table_name, index_name) is not None + class BaseOffLineMigration(BaseMigration): """Base class for off line migrations.""" @@ -2435,6 +2546,7 @@ class BaseOffLineMigration(BaseMigration): _LOGGER.debug("Migration not needed for '%s'", self.migration_id) self.migration_done(instance, session) return + self._ensure_index_exists(instance) _LOGGER.warning( "The database is about to do data migration step '%s', 
%s", self.migration_id, @@ -2449,6 +2561,25 @@ class BaseOffLineMigration(BaseMigration): """Migrate some data, returns True if migration is completed.""" return self._migrate_data(instance) + def _ensure_index_exists(self, instance: Recorder) -> None: + """Ensure the index needed by the migration exists.""" + if not self.index_to_drop: + return + table_name, index_name, base = self.index_to_drop + with session_scope(session=instance.get_session()) as session: + if get_index_by_name(session, table_name, index_name) is not None: + return + _LOGGER.warning( + ( + "Data migration step '%s' needs index `%s` on table `%s`, but " + "it does not exist and will be added now" + ), + self.migration_id, + index_name, + table_name, + ) + _create_index(instance, instance.get_session, table_name, index_name, base=base) + class BaseRunTimeMigration(BaseMigration): """Base class for run time migrations.""" @@ -2492,7 +2623,7 @@ class StatesContextIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): max_initial_schema_version = CONTEXT_ID_AS_BINARY_SCHEMA_VERSION - 1 migration_id = "state_context_id_as_binary" migration_version = 2 - index_to_drop = ("states", "ix_states_context_id") + index_to_drop = ("states", "ix_states_context_id", LegacyBase) def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: """Migrate states context_ids to use binary format, return True if completed.""" @@ -2536,7 +2667,7 @@ class EventsContextIDMigration(BaseMigrationWithQuery, BaseOffLineMigration): max_initial_schema_version = CONTEXT_ID_AS_BINARY_SCHEMA_VERSION - 1 migration_id = "event_context_id_as_binary" migration_version = 2 - index_to_drop = ("events", "ix_events_context_id") + index_to_drop = ("events", "ix_events_context_id", LegacyBase) def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: """Migrate events context_ids to use binary format, return True if completed.""" @@ -2814,7 +2945,11 @@ class EntityIDPostMigration(BaseMigrationWithQuery, 
BaseOffLineMigration): migration_id = "entity_id_post_migration" max_initial_schema_version = STATES_META_SCHEMA_VERSION - 1 - index_to_drop = (TABLE_STATES, LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX) + index_to_drop = ( + TABLE_STATES, + LEGACY_STATES_ENTITY_ID_LAST_UPDATED_TS_INDEX, + LegacyBase, + ) def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: """Migrate some data, returns True if migration is completed.""" diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index 462db70496a..052e9202715 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -600,7 +600,7 @@ async def test_schema_migrate( start=self.recorder_runs_manager.recording_start, created=dt_util.utcnow() ) - def _sometimes_failing_create_index(*args): + def _sometimes_failing_create_index(*args, **kwargs): """Make the first index create raise a retryable error to ensure we retry.""" if recorder_db_url.startswith("mysql://"): nonlocal create_calls @@ -609,7 +609,7 @@ async def test_schema_migrate( mysql_exception = OperationalError("statement", {}, []) mysql_exception.orig = Exception(1205, "retryable") raise mysql_exception - real_create_index(*args) + real_create_index(*args, **kwargs) with ( patch( @@ -712,7 +712,7 @@ def test_forgiving_add_index(recorder_db_url: str) -> None: instance = Mock() instance.get_session = Mock(return_value=session) migration._create_index( - instance.get_session, "states", "ix_states_context_id_bin" + instance, instance.get_session, "states", "ix_states_context_id_bin" ) engine.dispose() @@ -788,7 +788,7 @@ def test_forgiving_add_index_with_other_db_types( with patch( "homeassistant.components.recorder.migration.Table", return_value=mocked_table ): - migration._create_index(Mock(), "states", "ix_states_context_id") + migration._create_index(Mock(), Mock(), "states", "ix_states_context_id") assert "already exists on states" in caplog.text assert 
"continuing" in caplog.text diff --git a/tests/components/recorder/test_migration_run_time_migrations_remember.py b/tests/components/recorder/test_migration_run_time_migrations_remember.py index fa14570bc6b..677abd6083c 100644 --- a/tests/components/recorder/test_migration_run_time_migrations_remember.py +++ b/tests/components/recorder/test_migration_run_time_migrations_remember.py @@ -1,6 +1,6 @@ """Test run time migrations are remembered in the migration_changes table.""" -from collections.abc import Callable +from collections.abc import Callable, Generator import importlib import sys from unittest.mock import Mock, patch @@ -8,6 +8,7 @@ from unittest.mock import Mock, patch import pytest from sqlalchemy import create_engine from sqlalchemy.orm import Session +from sqlalchemy.schema import Index from homeassistant.components import recorder from homeassistant.components.recorder import core, migration, statistics @@ -87,138 +88,165 @@ def _create_engine_test( @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage @pytest.mark.parametrize( - ("initial_version", "expected_migrator_calls"), + ("initial_version", "expected_migrator_calls", "expected_created_indices"), + # expected_migrator_calls is a dict of + # migrator_id: (needs_migrate_calls, migrate_data_calls) [ ( 27, { - "state_context_id_as_binary": 1, - "event_context_id_as_binary": 1, - "event_type_id_migration": 1, - "entity_id_migration": 1, - "event_id_post_migration": 1, - "entity_id_post_migration": 1, + "state_context_id_as_binary": (0, 1), + "event_context_id_as_binary": (0, 1), + "event_type_id_migration": (2, 1), + "entity_id_migration": (2, 1), + "event_id_post_migration": (1, 1), + "entity_id_post_migration": (0, 1), }, + [ + "ix_states_context_id", + "ix_events_context_id", + "ix_states_entity_id_last_updated_ts", + ], ), ( 28, { - "state_context_id_as_binary": 1, - "event_context_id_as_binary": 1, - "event_type_id_migration": 1, - "entity_id_migration": 1, - 
"event_id_post_migration": 0, - "entity_id_post_migration": 1, + "state_context_id_as_binary": (0, 1), + "event_context_id_as_binary": (0, 1), + "event_type_id_migration": (2, 1), + "entity_id_migration": (2, 1), + "event_id_post_migration": (0, 0), + "entity_id_post_migration": (0, 1), }, + [ + "ix_states_context_id", + "ix_events_context_id", + "ix_states_entity_id_last_updated_ts", + ], ), ( 36, { - "state_context_id_as_binary": 0, - "event_context_id_as_binary": 0, - "event_type_id_migration": 1, - "entity_id_migration": 1, - "event_id_post_migration": 0, - "entity_id_post_migration": 1, + "state_context_id_as_binary": (0, 0), + "event_context_id_as_binary": (0, 0), + "event_type_id_migration": (2, 1), + "entity_id_migration": (2, 1), + "event_id_post_migration": (0, 0), + "entity_id_post_migration": (0, 1), }, + ["ix_states_entity_id_last_updated_ts"], ), ( 37, { - "state_context_id_as_binary": 0, - "event_context_id_as_binary": 0, - "event_type_id_migration": 0, - "entity_id_migration": 1, - "event_id_post_migration": 0, - "entity_id_post_migration": 1, + "state_context_id_as_binary": (0, 0), + "event_context_id_as_binary": (0, 0), + "event_type_id_migration": (0, 0), + "entity_id_migration": (2, 1), + "event_id_post_migration": (0, 0), + "entity_id_post_migration": (0, 1), }, + ["ix_states_entity_id_last_updated_ts"], ), ( 38, { - "state_context_id_as_binary": 0, - "event_context_id_as_binary": 0, - "event_type_id_migration": 0, - "entity_id_migration": 0, - "event_id_post_migration": 0, - "entity_id_post_migration": 0, + "state_context_id_as_binary": (0, 0), + "event_context_id_as_binary": (0, 0), + "event_type_id_migration": (0, 0), + "entity_id_migration": (0, 0), + "event_id_post_migration": (0, 0), + "entity_id_post_migration": (0, 0), }, + [], ), ( SCHEMA_VERSION, { - "state_context_id_as_binary": 0, - "event_context_id_as_binary": 0, - "event_type_id_migration": 0, - "entity_id_migration": 0, - "event_id_post_migration": 0, - 
"entity_id_post_migration": 0, + "state_context_id_as_binary": (0, 0), + "event_context_id_as_binary": (0, 0), + "event_type_id_migration": (0, 0), + "entity_id_migration": (0, 0), + "event_id_post_migration": (0, 0), + "entity_id_post_migration": (0, 0), }, + [], ), ], ) -async def test_data_migrator_new_database( +async def test_data_migrator_logic( async_test_recorder: RecorderInstanceGenerator, initial_version: int, - expected_migrator_calls: dict[str, int], + expected_migrator_calls: dict[str, tuple[int, int]], + expected_created_indices: list[str], ) -> None: - """Test that the data migrators are not executed on a new database.""" + """Test the data migrator logic. + + - The data migrators should not be executed on a new database. + - Indices needed by the migrators should be created if missing. + """ config = {recorder.CONF_COMMIT_INTERVAL: 1} - def needs_migrate_mock() -> Mock: - return Mock( - spec_set=[], - return_value=migration.DataMigrationStatus( - needs_migrate=False, migration_done=True + def migrator_mock() -> dict[str, Mock]: + return { + "needs_migrate": Mock( + spec_set=[], + return_value=migration.DataMigrationStatus( + needs_migrate=True, migration_done=False + ), ), - ) + "migrate_data": Mock(spec_set=[], return_value=True), + } migrator_mocks = { - "state_context_id_as_binary": needs_migrate_mock(), - "event_context_id_as_binary": needs_migrate_mock(), - "event_type_id_migration": needs_migrate_mock(), - "entity_id_migration": needs_migrate_mock(), - "event_id_post_migration": needs_migrate_mock(), - "entity_id_post_migration": needs_migrate_mock(), + "state_context_id_as_binary": migrator_mock(), + "event_context_id_as_binary": migrator_mock(), + "event_type_id_migration": migrator_mock(), + "entity_id_migration": migrator_mock(), + "event_id_post_migration": migrator_mock(), + "entity_id_post_migration": migrator_mock(), } + def patch_check( + migrator_id: str, migrator_class: type[migration.BaseMigration] + ) -> Generator[None]: + return 
patch.object( + migrator_class, + "needs_migrate_impl", + side_effect=migrator_mocks[migrator_id]["needs_migrate"], + ) + + def patch_migrate( + migrator_id: str, migrator_class: type[migration.BaseMigration] + ) -> Generator[None]: + return patch.object( + migrator_class, + "migrate_data", + side_effect=migrator_mocks[migrator_id]["migrate_data"], + ) + with ( - patch.object( - migration.StatesContextIDMigration, - "needs_migrate_impl", - side_effect=migrator_mocks["state_context_id_as_binary"], - ), - patch.object( - migration.EventsContextIDMigration, - "needs_migrate_impl", - side_effect=migrator_mocks["event_context_id_as_binary"], - ), - patch.object( - migration.EventTypeIDMigration, - "needs_migrate_impl", - side_effect=migrator_mocks["event_type_id_migration"], - ), - patch.object( - migration.EntityIDMigration, - "needs_migrate_impl", - side_effect=migrator_mocks["entity_id_migration"], - ), - patch.object( - migration.EventIDPostMigration, - "needs_migrate_impl", - side_effect=migrator_mocks["event_id_post_migration"], - ), - patch.object( - migration.EntityIDPostMigration, - "needs_migrate_impl", - side_effect=migrator_mocks["entity_id_post_migration"], - ), + patch_check("state_context_id_as_binary", migration.StatesContextIDMigration), + patch_check("event_context_id_as_binary", migration.EventsContextIDMigration), + patch_check("event_type_id_migration", migration.EventTypeIDMigration), + patch_check("entity_id_migration", migration.EntityIDMigration), + patch_check("event_id_post_migration", migration.EventIDPostMigration), + patch_check("entity_id_post_migration", migration.EntityIDPostMigration), + patch_migrate("state_context_id_as_binary", migration.StatesContextIDMigration), + patch_migrate("event_context_id_as_binary", migration.EventsContextIDMigration), + patch_migrate("event_type_id_migration", migration.EventTypeIDMigration), + patch_migrate("entity_id_migration", migration.EntityIDMigration), + patch_migrate("event_id_post_migration", 
migration.EventIDPostMigration), + patch_migrate("entity_id_post_migration", migration.EntityIDPostMigration), patch( CREATE_ENGINE_TARGET, new=_create_engine_test( SCHEMA_MODULE_CURRENT, initial_version=initial_version ), ), + patch( + "sqlalchemy.schema.Index.create", autospec=True, wraps=Index.create + ) as wrapped_idx_create, ): async with ( async_test_home_assistant() as hass, @@ -231,8 +259,15 @@ async def test_data_migrator_new_database( await hass.async_block_till_done() await hass.async_stop() + index_names = [call[1][0].name for call in wrapped_idx_create.mock_calls] + assert index_names == expected_created_indices + + # Check each data migrator's needs_migrate_impl and migrate_data methods were called + # the expected number of times. for migrator, mock in migrator_mocks.items(): - assert len(mock.mock_calls) == expected_migrator_calls[migrator] + needs_migrate_calls, migrate_data_calls = expected_migrator_calls[migrator] + assert len(mock["needs_migrate"].mock_calls) == needs_migrate_calls + assert len(mock["migrate_data"].mock_calls) == migrate_data_calls @pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) diff --git a/tests/components/recorder/test_purge_v32_schema.py b/tests/components/recorder/test_purge_v32_schema.py index 2bd1e7fd7f7..d68d1550268 100644 --- a/tests/components/recorder/test_purge_v32_schema.py +++ b/tests/components/recorder/test_purge_v32_schema.py @@ -1027,7 +1027,7 @@ async def test_purge_can_mix_legacy_and_new_format( def _recreate_legacy_events_index(): """Recreate the legacy events index since its no longer created on new instances.""" migration._create_index( - recorder_mock.get_session, "states", "ix_states_event_id" + recorder_mock, recorder_mock.get_session, "states", "ix_states_event_id" ) recorder_mock.use_legacy_events_index = True @@ -1178,7 +1178,7 @@ async def test_purge_can_mix_legacy_and_new_format_with_detached_state( def _recreate_legacy_events_index(): """Recreate the legacy events index since 
its no longer created on new instances.""" migration._create_index( - recorder_mock.get_session, "states", "ix_states_event_id" + recorder_mock, recorder_mock.get_session, "states", "ix_states_event_id" ) recorder_mock.use_legacy_events_index = True From 1bdda0249e775cf24611a2d45e2f161c2b7a372e Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Wed, 18 Dec 2024 21:38:52 +0100 Subject: [PATCH 447/677] Bump PyViCare to 2.39.0 (#133519) --- homeassistant/components/vicare/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/vicare/manifest.json b/homeassistant/components/vicare/manifest.json index 0bb5594e829..72bc3de53d8 100644 --- a/homeassistant/components/vicare/manifest.json +++ b/homeassistant/components/vicare/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/vicare", "iot_class": "cloud_polling", "loggers": ["PyViCare"], - "requirements": ["PyViCare==2.38.0"] + "requirements": ["PyViCare==2.39.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 79f1411ea42..75d0a88b009 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -100,7 +100,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare==2.38.0 +PyViCare==2.39.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 32e815babdd..2052aa1d560 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -94,7 +94,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare==2.38.0 +PyViCare==2.39.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 From e4bb351d2d11d0b8cad56debd48cc3126919c4e4 Mon Sep 17 00:00:00 2001 From: Raphael Hehl <7577984+RaHehl@users.noreply.github.com> Date: Wed, 18 Dec 2024 21:41:22 
+0100 Subject: [PATCH 448/677] Bump uiprotect to 7.1.0 (#133520) * Bump uiprotect to version 7.1.0 * Add aiports to bootstrap fixture in unifiprotect tests --- homeassistant/components/unifiprotect/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/unifiprotect/conftest.py | 1 + 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json index 81ef72ec50d..1226f96c253 100644 --- a/homeassistant/components/unifiprotect/manifest.json +++ b/homeassistant/components/unifiprotect/manifest.json @@ -40,7 +40,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["uiprotect", "unifi_discovery"], - "requirements": ["uiprotect==7.0.2", "unifi-discovery==1.2.0"], + "requirements": ["uiprotect==7.1.0", "unifi-discovery==1.2.0"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/requirements_all.txt b/requirements_all.txt index 75d0a88b009..49c8bc8534f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2910,7 +2910,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==7.0.2 +uiprotect==7.1.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2052aa1d560..1b1226cd6d6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2329,7 +2329,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==7.0.2 +uiprotect==7.1.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 diff --git a/tests/components/unifiprotect/conftest.py b/tests/components/unifiprotect/conftest.py index 3ed559b71ec..352c33297ba 100644 --- a/tests/components/unifiprotect/conftest.py +++ b/tests/components/unifiprotect/conftest.py @@ -98,6 +98,7 @@ def bootstrap_fixture(nvr: NVR): data["events"] = [] 
data["doorlocks"] = [] data["chimes"] = [] + data["aiports"] = [] return Bootstrap.from_unifi_dict(**data) From ba3fca53b0ed81e0aa41fb734034df69a7af305e Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Wed, 18 Dec 2024 21:49:32 +0100 Subject: [PATCH 449/677] Reolink platinum quality scale (#133514) --- homeassistant/components/reolink/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index 7aced174e30..e5e8afc1d63 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -18,5 +18,6 @@ "documentation": "https://www.home-assistant.io/integrations/reolink", "iot_class": "local_push", "loggers": ["reolink_aio"], + "quality_scale": "platinum", "requirements": ["reolink-aio==0.11.5"] } From 2a9082559a143431faa641c24bd27c73c4019adb Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Wed, 18 Dec 2024 22:35:58 +0100 Subject: [PATCH 450/677] Fix names and description of two actions (#133528) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The two actions enable_motion_recording and disable_motion_recording use "Enables" and "Disables" in their names. This is inconsistent with the name of the actions, all other actions of this component, and the standard way of naming them, too. In addition the description of the latter misses the "s" which causes additional inconsistency – especially in translations. 
--- homeassistant/components/amcrest/strings.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/amcrest/strings.json b/homeassistant/components/amcrest/strings.json index 816511bf05e..807c75e1ac8 100644 --- a/homeassistant/components/amcrest/strings.json +++ b/homeassistant/components/amcrest/strings.json @@ -41,7 +41,7 @@ } }, "enable_motion_recording": { - "name": "Enables motion recording", + "name": "Enable motion recording", "description": "Enables recording a clip to camera storage when motion is detected.", "fields": { "entity_id": { @@ -51,8 +51,8 @@ } }, "disable_motion_recording": { - "name": "Disables motion recording", - "description": "Disable recording a clip to camera storage when motion is detected.", + "name": "Disable motion recording", + "description": "Disables recording a clip to camera storage when motion is detected.", "fields": { "entity_id": { "name": "[%key:component::amcrest::services::enable_recording::fields::entity_id::name%]", From 9e6a8638ddf514da45c52c934e2f8320242abf17 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Wed, 18 Dec 2024 21:38:57 +0000 Subject: [PATCH 451/677] Bump idasen-ha to 2.6.3 (#133508) This is a minor bump that adds py.typed --- homeassistant/components/idasen_desk/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/idasen_desk/manifest.json b/homeassistant/components/idasen_desk/manifest.json index 2f53ec20e11..7f44f8bbf44 100644 --- a/homeassistant/components/idasen_desk/manifest.json +++ b/homeassistant/components/idasen_desk/manifest.json @@ -12,5 +12,5 @@ "documentation": "https://www.home-assistant.io/integrations/idasen_desk", "integration_type": "device", "iot_class": "local_push", - "requirements": ["idasen-ha==2.6.2"] + "requirements": ["idasen-ha==2.6.3"] } diff --git a/requirements_all.txt b/requirements_all.txt 
index 49c8bc8534f..d8dc08ca301 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1181,7 +1181,7 @@ ical==8.2.0 icmplib==3.0 # homeassistant.components.idasen_desk -idasen-ha==2.6.2 +idasen-ha==2.6.3 # homeassistant.components.network ifaddr==0.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1b1226cd6d6..a988c0836b8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1001,7 +1001,7 @@ ical==8.2.0 icmplib==3.0 # homeassistant.components.idasen_desk -idasen-ha==2.6.2 +idasen-ha==2.6.3 # homeassistant.components.network ifaddr==0.2.0 From 03707e6308628b6010045282d5e8b522bdd932dc Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Wed, 18 Dec 2024 22:40:30 +0100 Subject: [PATCH 452/677] Improve field descriptions for Download file action (#133413) * Improve field descriptions for Download file action Currently two of the field descriptions for the Download file action don't explain exactly what should be entered but rather explain these like additional actions. The third, the Overwrite file option is misleading as it does not refer to an existing file. This commit fixes both issues by explaining the purpose of all three fields in a slightly more detailed fashion. * Update homeassistant/components/downloader/strings.json Co-authored-by: Josef Zweck * Update homeassistant/components/downloader/strings.json Co-authored-by: Josef Zweck --------- Co-authored-by: Josef Zweck --- homeassistant/components/downloader/strings.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/downloader/strings.json b/homeassistant/components/downloader/strings.json index 11a2bda8fce..7db7ea459d7 100644 --- a/homeassistant/components/downloader/strings.json +++ b/homeassistant/components/downloader/strings.json @@ -23,15 +23,15 @@ }, "subdir": { "name": "Subdirectory", - "description": "Download into subdirectory." + "description": "Relative download path." 
}, "filename": { "name": "Filename", - "description": "Determine the filename." + "description": "Custom name for the downloaded file." }, "overwrite": { "name": "Overwrite", - "description": "Whether to overwrite the file or not." + "description": "Overwrite file if it exists." } } } From 9f3c549f8d58eef6740442ed941a9f588b9db0b2 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 18 Dec 2024 23:46:18 +0100 Subject: [PATCH 453/677] Add integration setup tests to Peblar Rocksolid EV Chargers (#133532) --- tests/components/peblar/conftest.py | 7 +-- tests/components/peblar/test_init.py | 69 ++++++++++++++++++++++++++++ 2 files changed, 73 insertions(+), 3 deletions(-) create mode 100644 tests/components/peblar/test_init.py diff --git a/tests/components/peblar/conftest.py b/tests/components/peblar/conftest.py index dfe6aabc6bc..583b2cbe7a5 100644 --- a/tests/components/peblar/conftest.py +++ b/tests/components/peblar/conftest.py @@ -38,9 +38,10 @@ def mock_setup_entry() -> Generator[None]: @pytest.fixture def mock_peblar() -> Generator[MagicMock]: """Return a mocked Peblar client.""" - with patch( - "homeassistant.components.peblar.config_flow.Peblar", autospec=True - ) as peblar_mock: + with ( + patch("homeassistant.components.peblar.Peblar", autospec=True) as peblar_mock, + patch("homeassistant.components.peblar.config_flow.Peblar", new=peblar_mock), + ): peblar = peblar_mock.return_value peblar.system_information.return_value = PeblarSystemInformation.from_json( load_fixture("system_information.json", DOMAIN) diff --git a/tests/components/peblar/test_init.py b/tests/components/peblar/test_init.py new file mode 100644 index 00000000000..78508501ba8 --- /dev/null +++ b/tests/components/peblar/test_init.py @@ -0,0 +1,69 @@ +"""Integration tests for the Peblar integration.""" + +from unittest.mock import MagicMock + +from peblar import PeblarAuthenticationError, PeblarConnectionError, PeblarError +import pytest + +from homeassistant.components.peblar.const import 
DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_load_unload_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_peblar: MagicMock, +) -> None: + """Test the Peblar configuration entry loading/unloading.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.LOADED + assert len(mock_peblar.login.mock_calls) == 1 + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert not hass.data.get(DOMAIN) + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +@pytest.mark.parametrize( + "exception", + [PeblarConnectionError, PeblarError], +) +async def test_config_entry_not_ready( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_peblar: MagicMock, + exception: Exception, +) -> None: + """Test the Peblar configuration entry not ready.""" + mock_peblar.login.side_effect = exception + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert len(mock_peblar.login.mock_calls) == 1 + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_config_entry_authentication_failed( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_peblar: MagicMock, +) -> None: + """Test authentication error, aborts setup.""" + mock_config_entry.add_to_hass(hass) + + mock_peblar.login.side_effect = PeblarAuthenticationError + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR From 0076bd838942e908c2ad922d0b5964882bb20daf Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Wed, 18 Dec 2024 22:47:24 +0000 Subject: [PATCH 454/677] Simplify Idasen Desk entity properties (#133536) --- homeassistant/components/idasen_desk/cover.py | 12 +++++------ .../components/idasen_desk/sensor.py | 21 +++++-------------- 2 files changed, 10 insertions(+), 23 deletions(-) diff --git a/homeassistant/components/idasen_desk/cover.py b/homeassistant/components/idasen_desk/cover.py index 95474ea8750..a8ba0983e99 100644 --- a/homeassistant/components/idasen_desk/cover.py +++ b/homeassistant/components/idasen_desk/cover.py @@ -12,7 +12,7 @@ from homeassistant.components.cover import ( CoverEntity, CoverEntityFeature, ) -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -46,7 +46,6 @@ class IdasenDeskCover(IdasenDeskEntity, CoverEntity): def __init__(self, coordinator: IdasenDeskCoordinator) -> None: """Initialize an Idasen Desk cover.""" super().__init__(coordinator.address, coordinator) - self._attr_current_cover_position = self._desk.height_percent @property def is_closed(self) -> bool: @@ -83,8 +82,7 @@ class IdasenDeskCover(IdasenDeskEntity, CoverEntity): "Failed to move to specified position: Bluetooth error" ) from err - @callback - def _handle_coordinator_update(self, *args: Any) -> None: - """Handle data update.""" - self._attr_current_cover_position = self._desk.height_percent - self.async_write_ha_state() + @property + def current_cover_position(self) -> int | None: + """Return the current cover position.""" + return self._desk.height_percent diff --git a/homeassistant/components/idasen_desk/sensor.py b/homeassistant/components/idasen_desk/sensor.py index d4f629b85a8..4613d316a52 100644 --- a/homeassistant/components/idasen_desk/sensor.py +++ b/homeassistant/components/idasen_desk/sensor.py @@ -4,7 +4,6 @@ from __future__ import 
annotations from collections.abc import Callable from dataclasses import dataclass -from typing import Any from homeassistant.components.sensor import ( SensorDeviceClass, @@ -13,7 +12,7 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.const import UnitOfLength -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import IdasenDeskConfigEntry, IdasenDeskCoordinator @@ -68,17 +67,7 @@ class IdasenDeskSensor(IdasenDeskEntity, SensorEntity): super().__init__(f"{description.key}-{coordinator.address}", coordinator) self.entity_description = description - async def async_added_to_hass(self) -> None: - """When entity is added to hass.""" - await super().async_added_to_hass() - self._update_native_value() - - @callback - def _handle_coordinator_update(self, *args: Any) -> None: - """Handle data update.""" - self._update_native_value() - super()._handle_coordinator_update() - - def _update_native_value(self) -> None: - """Update the native value attribute.""" - self._attr_native_value = self.entity_description.value_fn(self.coordinator) + @property + def native_value(self) -> float | None: + """Return the value reported by the sensor.""" + return self.entity_description.value_fn(self.coordinator) From 35601480d2a8aeed9f6993ef1d5f523ac3b3823b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 18 Dec 2024 12:48:39 -1000 Subject: [PATCH 455/677] Bump aiohttp to 3.11.11 (#133530) --- homeassistant/components/image/__init__.py | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/image/__init__.py b/homeassistant/components/image/__init__.py index ea235127894..dbb5962eabf 100644 --- a/homeassistant/components/image/__init__.py +++ b/homeassistant/components/image/__init__.py @@ -348,7 +348,7 @@ async def async_get_still_stream( # While this results in additional bandwidth usage, # given the low frequency of image updates, it is acceptable. frame.extend(frame) - await response.write(frame) # type: ignore[arg-type] + await response.write(frame) return True event = asyncio.Event() diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index add20ef0870..49a6841d3a1 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -5,7 +5,7 @@ aiodiscover==2.1.0 aiodns==3.2.0 aiohasupervisor==0.2.2b2 aiohttp-fast-zlib==0.2.0 -aiohttp==3.11.10 +aiohttp==3.11.11 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 diff --git a/pyproject.toml b/pyproject.toml index 91acea30b52..af79a173bab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ dependencies = [ # change behavior based on presence of supervisor. 
Deprecated with #127228 # Lib can be removed with 2025.11 "aiohasupervisor==0.2.2b2", - "aiohttp==3.11.10", + "aiohttp==3.11.11", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.2.0", "aiozoneinfo==0.2.1", diff --git a/requirements.txt b/requirements.txt index e4346c3e517..a6fda6760d4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,7 @@ # Home Assistant Core aiodns==3.2.0 aiohasupervisor==0.2.2b2 -aiohttp==3.11.10 +aiohttp==3.11.11 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.2.0 aiozoneinfo==0.2.1 From 3fe08a722330e63a5e45951586aa409f70b696ff Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 19 Dec 2024 00:39:14 +0100 Subject: [PATCH 456/677] Add zeroconf discovery to Peblar Rocksolid EV chargers (#133529) --- .../components/peblar/config_flow.py | 62 +++++- homeassistant/components/peblar/manifest.json | 3 +- homeassistant/components/peblar/strings.json | 12 +- homeassistant/generated/zeroconf.py | 4 + tests/components/peblar/test_config_flow.py | 208 +++++++++++++++++- 5 files changed, 285 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/peblar/config_flow.py b/homeassistant/components/peblar/config_flow.py index 056d4a68be6..a9cfb7d89b9 100644 --- a/homeassistant/components/peblar/config_flow.py +++ b/homeassistant/components/peblar/config_flow.py @@ -8,6 +8,7 @@ from aiohttp import CookieJar from peblar import Peblar, PeblarAuthenticationError, PeblarConnectionError import voluptuous as vol +from homeassistant.components import zeroconf from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD from homeassistant.helpers.aiohttp_client import async_create_clientsession @@ -25,6 +26,8 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + _host: str + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -49,7 +52,9 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN): LOGGER.exception("Unexpected 
exception") errors["base"] = "unknown" else: - await self.async_set_unique_id(info.product_serial_number) + await self.async_set_unique_id( + info.product_serial_number, raise_on_progress=False + ) self._abort_if_unique_id_configured() return self.async_create_entry(title="Peblar", data=user_input) else: @@ -69,3 +74,58 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN): ), errors=errors, ) + + async def async_step_zeroconf( + self, discovery_info: zeroconf.ZeroconfServiceInfo + ) -> ConfigFlowResult: + """Handle zeroconf discovery of a Peblar device.""" + if not (sn := discovery_info.properties.get("sn")): + return self.async_abort(reason="no_serial_number") + + await self.async_set_unique_id(sn) + self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.host}) + + self._host = discovery_info.host + self.context.update({"configuration_url": f"http://{discovery_info.host}"}) + return await self.async_step_zeroconf_confirm() + + async def async_step_zeroconf_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initiated by zeroconf.""" + errors = {} + + if user_input is not None: + peblar = Peblar( + host=self._host, + session=async_create_clientsession( + self.hass, cookie_jar=CookieJar(unsafe=True) + ), + ) + try: + await peblar.login(password=user_input[CONF_PASSWORD]) + except PeblarAuthenticationError: + errors[CONF_PASSWORD] = "invalid_auth" + except Exception: # noqa: BLE001 + LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + return self.async_create_entry( + title="Peblar", + data={ + CONF_HOST: self._host, + CONF_PASSWORD: user_input[CONF_PASSWORD], + }, + ) + + return self.async_show_form( + step_id="zeroconf_confirm", + data_schema=vol.Schema( + { + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig(type=TextSelectorType.PASSWORD) + ), + } + ), + errors=errors, + ) diff --git a/homeassistant/components/peblar/manifest.json 
b/homeassistant/components/peblar/manifest.json index 6de605c95dc..1ae2a491ba9 100644 --- a/homeassistant/components/peblar/manifest.json +++ b/homeassistant/components/peblar/manifest.json @@ -7,5 +7,6 @@ "integration_type": "device", "iot_class": "local_polling", "quality_scale": "bronze", - "requirements": ["peblar==0.2.1"] + "requirements": ["peblar==0.2.1"], + "zeroconf": [{ "type": "_http._tcp.local.", "name": "pblr-*" }] } diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index 9bf4803b592..e5fa1e85a6a 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -11,6 +11,15 @@ "host": "The hostname or IP address of your Peblar charger on your home network.", "password": "The same password as you use to log in to the Peblar device' local web interface." } + }, + "zeroconf_confirm": { + "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need the password you use to log into the Peblar device' web interface.\n\nHome Assistant will automatically configure your Peblar charger for use with Home Assistant.", + "data": { + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "[%key:component::peblar::config::step::user::data_description::password%]" + } } }, "error": { @@ -19,7 +28,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "no_serial_number": "The discovered Peblar device did not provide a serial number." 
} } } diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index 2c914c2d240..66c576d8840 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -558,6 +558,10 @@ ZEROCONF = { "manufacturer": "nettigo", }, }, + { + "domain": "peblar", + "name": "pblr-*", + }, { "domain": "powerfox", "name": "powerfox*", diff --git a/tests/components/peblar/test_config_flow.py b/tests/components/peblar/test_config_flow.py index 0b2fa89e068..4e3ab008047 100644 --- a/tests/components/peblar/test_config_flow.py +++ b/tests/components/peblar/test_config_flow.py @@ -1,12 +1,14 @@ """Configuration flow tests for the Peblar integration.""" +from ipaddress import ip_address from unittest.mock import MagicMock from peblar import PeblarAuthenticationError, PeblarConnectionError import pytest +from homeassistant.components import zeroconf from homeassistant.components.peblar.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import CONF_HOST, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -113,3 +115,207 @@ async def test_user_flow_already_configured( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +@pytest.mark.usefixtures("mock_peblar") +async def test_zeroconf_flow(hass: HomeAssistant) -> None: + """Test the zeroconf happy flow from start to finish.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.1"), + ip_addresses=[ip_address("127.0.0.1")], + port=80, + hostname="pblr-0000645.local.", + name="mock_name", + properties={ + "sn": "23-45-A4O-MOF", + "version": "1.6.1+1+WL-1", + }, + type="mock_type", + ), + ) + + assert result["step_id"] == "zeroconf_confirm" 
+ assert result["type"] is FlowResultType.FORM + + progress = hass.config_entries.flow.async_progress() + assert len(progress) == 1 + assert progress[0].get("flow_id") == result["flow_id"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_PASSWORD: "OMGPINEAPPLES"} + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "23-45-A4O-MOF" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGPINEAPPLES", + } + assert not config_entry.options + + +async def test_zeroconf_flow_abort_no_serial(hass: HomeAssistant) -> None: + """Test the zeroconf aborts when it advertises incompatible data.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.1"), + ip_addresses=[ip_address("127.0.0.1")], + port=80, + hostname="pblr-0000645.local.", + name="mock_name", + properties={}, + type="mock_type", + ), + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_serial_number" + + +@pytest.mark.parametrize( + ("side_effect", "expected_error"), + [ + (PeblarConnectionError, {"base": "unknown"}), + (PeblarAuthenticationError, {CONF_PASSWORD: "invalid_auth"}), + (Exception, {"base": "unknown"}), + ], +) +async def test_zeroconf_flow_errors( + hass: HomeAssistant, + mock_peblar: MagicMock, + side_effect: Exception, + expected_error: dict[str, str], +) -> None: + """Test we show form on a error.""" + mock_peblar.login.side_effect = side_effect + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.1"), + ip_addresses=[ip_address("127.0.0.1")], + port=80, + hostname="pblr-0000645.local.", + name="mock_name", + properties={ + "sn": "23-45-A4O-MOF", + "version": "1.6.1+1+WL-1", 
+ }, + type="mock_type", + ), + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PASSWORD: "OMGPUPPIES", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "zeroconf_confirm" + assert result["errors"] == expected_error + + mock_peblar.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PASSWORD: "OMGPUPPIES", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.unique_id == "23-45-A4O-MOF" + assert config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGPUPPIES", + } + assert not config_entry.options + + +@pytest.mark.usefixtures("mock_peblar") +async def test_zeroconf_flow_not_discovered_again( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the zeroconf doesn't re-discover an existing device.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.1"), + ip_addresses=[ip_address("127.0.0.1")], + port=80, + hostname="pblr-0000645.local.", + name="mock_name", + properties={ + "sn": "23-45-A4O-MOF", + "version": "1.6.1+1+WL-1", + }, + type="mock_type", + ), + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.usefixtures("mock_peblar") +async def test_user_flow_with_zeroconf_in_progress(hass: HomeAssistant) -> None: + """Test the full happy path user flow from start to finish. + + While zeroconf discovery is already in progress. 
+ """ + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.1"), + ip_addresses=[ip_address("127.0.0.1")], + port=80, + hostname="pblr-0000645.local.", + name="mock_name", + properties={ + "sn": "23-45-A4O-MOF", + "version": "1.6.1+1+WL-1", + }, + type="mock_type", + ), + ) + + progress = hass.config_entries.flow.async_progress() + assert len(progress) == 1 + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + progress = hass.config_entries.flow.async_progress() + assert len(progress) == 2 + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGPUPPIES", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + assert not hass.config_entries.flow.async_progress() From 99698ef95d31be56bbc1c80b77babab194aab5a1 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 18 Dec 2024 19:41:53 -1000 Subject: [PATCH 457/677] Optimize start time state queries for PostgreSQL (#133228) --- .../components/recorder/history/modern.py | 76 ++++++-- .../components/recorder/statistics.py | 61 ++++-- tests/components/recorder/test_history.py | 124 +++++++++++++ tests/components/recorder/test_statistics.py | 173 +++++++++++++++++- 4 files changed, 400 insertions(+), 34 deletions(-) diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index 9159bbc6181..279ca9c9eea 100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -27,8 +27,13 @@ from homeassistant.core import HomeAssistant, State, split_entity_id from homeassistant.helpers.recorder import get_instance import homeassistant.util.dt as dt_util -from ..const import LAST_REPORTED_SCHEMA_VERSION -from ..db_schema import SHARED_ATTR_OR_LEGACY_ATTRIBUTES, StateAttributes, States +from ..const import LAST_REPORTED_SCHEMA_VERSION, SupportedDialect +from ..db_schema import ( + SHARED_ATTR_OR_LEGACY_ATTRIBUTES, + StateAttributes, + States, + StatesMeta, +) from ..filters import Filters from ..models import ( LazyState, @@ -145,6 +150,7 @@ def _significant_states_stmt( no_attributes: bool, include_start_time_state: bool, run_start_ts: float | None, + lateral_join_for_start_time: bool, ) -> Select | CompoundSelect: """Query the database for significant state changes.""" include_last_changed = not significant_changes_only @@ -184,6 +190,7 @@ def _significant_states_stmt( metadata_ids, no_attributes, include_last_changed, + lateral_join_for_start_time, ).subquery(), no_attributes, include_last_changed, @@ -254,6 +261,7 @@ def get_significant_states_with_session( start_time_ts = start_time.timestamp() end_time_ts = datetime_to_timestamp_or_none(end_time) single_metadata_id = metadata_ids[0] if len(metadata_ids) == 1 else None + 
lateral_join_for_start_time = instance.dialect_name == SupportedDialect.POSTGRESQL stmt = lambda_stmt( lambda: _significant_states_stmt( start_time_ts, @@ -265,6 +273,7 @@ def get_significant_states_with_session( no_attributes, include_start_time_state, run_start_ts, + lateral_join_for_start_time, ), track_on=[ bool(single_metadata_id), @@ -556,30 +565,61 @@ def _get_start_time_state_for_entities_stmt( metadata_ids: list[int], no_attributes: bool, include_last_changed: bool, + lateral_join_for_start_time: bool, ) -> Select: """Baked query to get states for specific entities.""" # We got an include-list of entities, accelerate the query by filtering already # in the inner and the outer query. + if lateral_join_for_start_time: + # PostgreSQL does not support index skip scan/loose index scan + # https://wiki.postgresql.org/wiki/Loose_indexscan + # so we need to do a lateral join to get the max last_updated_ts + # for each metadata_id as a group-by is too slow. + # https://github.com/home-assistant/core/issues/132865 + max_metadata_id = StatesMeta.metadata_id.label("max_metadata_id") + max_last_updated = ( + select(func.max(States.last_updated_ts)) + .where( + (States.metadata_id == max_metadata_id) + & (States.last_updated_ts >= run_start_ts) + & (States.last_updated_ts < epoch_time) + ) + .subquery() + .lateral() + ) + most_recent_states_for_entities_by_date = ( + select(max_metadata_id, max_last_updated.c[0].label("max_last_updated")) + .select_from(StatesMeta) + .join( + max_last_updated, + StatesMeta.metadata_id == max_metadata_id, + ) + .where(StatesMeta.metadata_id.in_(metadata_ids)) + ).subquery() + else: + # Simple group-by for MySQL and SQLite, must use less + # than 1000 metadata_ids in the IN clause for MySQL + # or it will optimize poorly. 
+ most_recent_states_for_entities_by_date = ( + select( + States.metadata_id.label("max_metadata_id"), + func.max(States.last_updated_ts).label("max_last_updated"), + ) + .filter( + (States.last_updated_ts >= run_start_ts) + & (States.last_updated_ts < epoch_time) + & States.metadata_id.in_(metadata_ids) + ) + .group_by(States.metadata_id) + .subquery() + ) + stmt = ( _stmt_and_join_attributes_for_start_state( no_attributes, include_last_changed, False ) .join( - ( - most_recent_states_for_entities_by_date := ( - select( - States.metadata_id.label("max_metadata_id"), - func.max(States.last_updated_ts).label("max_last_updated"), - ) - .filter( - (States.last_updated_ts >= run_start_ts) - & (States.last_updated_ts < epoch_time) - & States.metadata_id.in_(metadata_ids) - ) - .group_by(States.metadata_id) - .subquery() - ) - ), + most_recent_states_for_entities_by_date, and_( States.metadata_id == most_recent_states_for_entities_by_date.c.max_metadata_id, @@ -621,6 +661,7 @@ def _get_start_time_state_stmt( metadata_ids: list[int], no_attributes: bool, include_last_changed: bool, + lateral_join_for_start_time: bool, ) -> Select: """Return the states at a specific point in time.""" if single_metadata_id: @@ -641,6 +682,7 @@ def _get_start_time_state_stmt( metadata_ids, no_attributes, include_last_changed, + lateral_join_for_start_time, ) diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py index 3f1d5b981e3..9e47ca43c5b 100644 --- a/homeassistant/components/recorder/statistics.py +++ b/homeassistant/components/recorder/statistics.py @@ -63,6 +63,7 @@ from .db_schema import ( STATISTICS_TABLES, Statistics, StatisticsBase, + StatisticsMeta, StatisticsRuns, StatisticsShortTerm, ) @@ -1669,6 +1670,7 @@ def _augment_result_with_change( drop_sum = "sum" not in _types prev_sums = {} if tmp := _statistics_at_time( + hass, session, {metadata[statistic_id][0] for statistic_id in result}, table, @@ -2032,22 +2034,50 @@ def 
_generate_statistics_at_time_stmt( metadata_ids: set[int], start_time_ts: float, types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]], + lateral_join_for_start_time: bool, ) -> StatementLambdaElement: """Create the statement for finding the statistics for a given time.""" stmt = _generate_select_columns_for_types_stmt(table, types) - stmt += lambda q: q.join( - ( - most_recent_statistic_ids := ( - select( - func.max(table.start_ts).label("max_start_ts"), - table.metadata_id.label("max_metadata_id"), - ) - .filter(table.start_ts < start_time_ts) - .filter(table.metadata_id.in_(metadata_ids)) - .group_by(table.metadata_id) - .subquery() + if lateral_join_for_start_time: + # PostgreSQL does not support index skip scan/loose index scan + # https://wiki.postgresql.org/wiki/Loose_indexscan + # so we need to do a lateral join to get the max max_start_ts + # for each metadata_id as a group-by is too slow. + # https://github.com/home-assistant/core/issues/132865 + max_metadata_id = StatisticsMeta.id.label("max_metadata_id") + max_start = ( + select(func.max(table.start_ts)) + .filter(table.metadata_id == max_metadata_id) + .filter(table.start_ts < start_time_ts) + .filter(table.metadata_id.in_(metadata_ids)) + .subquery() + .lateral() + ) + most_recent_statistic_ids = ( + select(max_metadata_id, max_start.c[0].label("max_start_ts")) + .select_from(StatisticsMeta) + .join( + max_start, + StatisticsMeta.id == max_metadata_id, ) - ), + .where(StatisticsMeta.id.in_(metadata_ids)) + ).subquery() + else: + # Simple group-by for MySQL and SQLite, must use less + # than 1000 metadata_ids in the IN clause for MySQL + # or it will optimize poorly. 
+ most_recent_statistic_ids = ( + select( + func.max(table.start_ts).label("max_start_ts"), + table.metadata_id.label("max_metadata_id"), + ) + .filter(table.start_ts < start_time_ts) + .filter(table.metadata_id.in_(metadata_ids)) + .group_by(table.metadata_id) + .subquery() + ) + stmt += lambda q: q.join( + most_recent_statistic_ids, and_( table.start_ts == most_recent_statistic_ids.c.max_start_ts, table.metadata_id == most_recent_statistic_ids.c.max_metadata_id, @@ -2057,6 +2087,7 @@ def _generate_statistics_at_time_stmt( def _statistics_at_time( + hass: HomeAssistant, session: Session, metadata_ids: set[int], table: type[StatisticsBase], @@ -2065,7 +2096,11 @@ def _statistics_at_time( ) -> Sequence[Row] | None: """Return last known statistics, earlier than start_time, for the metadata_ids.""" start_time_ts = start_time.timestamp() - stmt = _generate_statistics_at_time_stmt(table, metadata_ids, start_time_ts, types) + dialect_name = get_instance(hass).dialect_name + lateral_join_for_start_time = dialect_name == SupportedDialect.POSTGRESQL + stmt = _generate_statistics_at_time_stmt( + table, metadata_ids, start_time_ts, types, lateral_join_for_start_time + ) return cast(Sequence[Row], execute_stmt_lambda_element(session, stmt)) diff --git a/tests/components/recorder/test_history.py b/tests/components/recorder/test_history.py index 28b8275247c..eea4605039b 100644 --- a/tests/components/recorder/test_history.py +++ b/tests/components/recorder/test_history.py @@ -1014,3 +1014,127 @@ async def test_get_last_state_changes_with_non_existent_entity_ids_returns_empty ) -> None: """Test get_last_state_changes returns an empty dict when entities not in the db.""" assert history.get_last_state_changes(hass, 1, "nonexistent.entity") == {} + + +@pytest.mark.skip_on_db_engine(["sqlite", "mysql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.usefixtures("recorder_db_url") +async def test_get_significant_states_with_session_uses_lateral_with_postgresql( + hass: 
HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test get_significant_states_with_session uses the lateral path with PostgreSQL.""" + entity_id = "media_player.test" + hass.states.async_set("any.other", "on") + await async_wait_recording_done(hass) + hass.states.async_set(entity_id, "off") + + def set_state(state): + """Set the state.""" + hass.states.async_set(entity_id, state, {"any": 1}) + return hass.states.get(entity_id) + + start = dt_util.utcnow().replace(microsecond=0) + point = start + timedelta(seconds=1) + point2 = start + timedelta(seconds=1, microseconds=100) + point3 = start + timedelta(seconds=1, microseconds=200) + end = point + timedelta(seconds=1, microseconds=400) + + with freeze_time(start) as freezer: + set_state("idle") + set_state("YouTube") + + freezer.move_to(point) + states = [set_state("idle")] + + freezer.move_to(point2) + states.append(set_state("Netflix")) + + freezer.move_to(point3) + states.append(set_state("Plex")) + + freezer.move_to(end) + set_state("Netflix") + set_state("Plex") + await async_wait_recording_done(hass) + + start_time = point2 + timedelta(microseconds=10) + hist = history.get_significant_states( + hass=hass, + start_time=start_time, # Pick a point where we will generate a start time state + end_time=end, + entity_ids=[entity_id, "any.other"], + include_start_time_state=True, + ) + assert len(hist[entity_id]) == 2 + + sqlalchemy_logs = "".join( + [ + record.getMessage() + for record in caplog.records + if record.name.startswith("sqlalchemy.engine") + ] + ) + # We can't patch inside the lambda so we have to check the logs + assert "JOIN LATERAL" in sqlalchemy_logs + + +@pytest.mark.skip_on_db_engine(["postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.usefixtures("recorder_db_url") +async def test_get_significant_states_with_session_uses_non_lateral_without_postgresql( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test 
get_significant_states_with_session does not use a the lateral path without PostgreSQL.""" + entity_id = "media_player.test" + hass.states.async_set("any.other", "on") + await async_wait_recording_done(hass) + hass.states.async_set(entity_id, "off") + + def set_state(state): + """Set the state.""" + hass.states.async_set(entity_id, state, {"any": 1}) + return hass.states.get(entity_id) + + start = dt_util.utcnow().replace(microsecond=0) + point = start + timedelta(seconds=1) + point2 = start + timedelta(seconds=1, microseconds=100) + point3 = start + timedelta(seconds=1, microseconds=200) + end = point + timedelta(seconds=1, microseconds=400) + + with freeze_time(start) as freezer: + set_state("idle") + set_state("YouTube") + + freezer.move_to(point) + states = [set_state("idle")] + + freezer.move_to(point2) + states.append(set_state("Netflix")) + + freezer.move_to(point3) + states.append(set_state("Plex")) + + freezer.move_to(end) + set_state("Netflix") + set_state("Plex") + await async_wait_recording_done(hass) + + start_time = point2 + timedelta(microseconds=10) + hist = history.get_significant_states( + hass=hass, + start_time=start_time, # Pick a point where we will generate a start time state + end_time=end, + entity_ids=[entity_id, "any.other"], + include_start_time_state=True, + ) + assert len(hist[entity_id]) == 2 + + sqlalchemy_logs = "".join( + [ + record.getMessage() + for record in caplog.records + if record.name.startswith("sqlalchemy.engine") + ] + ) + # We can't patch inside the lambda so we have to check the logs + assert "JOIN LATERAL" not in sqlalchemy_logs diff --git a/tests/components/recorder/test_statistics.py b/tests/components/recorder/test_statistics.py index 6b1e1a655db..55029c3eacf 100644 --- a/tests/components/recorder/test_statistics.py +++ b/tests/components/recorder/test_statistics.py @@ -1914,20 +1914,185 @@ def test_cache_key_for_generate_max_mean_min_statistic_in_sub_period_stmt() -> N assert cache_key_1 != cache_key_3 -def 
test_cache_key_for_generate_statistics_at_time_stmt() -> None: +@pytest.mark.parametrize("lateral_join_for_start_time", [True, False]) +def test_cache_key_for_generate_statistics_at_time_stmt( + lateral_join_for_start_time: bool, +) -> None: """Test cache key for _generate_statistics_at_time_stmt.""" - stmt = _generate_statistics_at_time_stmt(StatisticsShortTerm, {0}, 0.0, set()) + stmt = _generate_statistics_at_time_stmt( + StatisticsShortTerm, {0}, 0.0, set(), lateral_join_for_start_time + ) cache_key_1 = stmt._generate_cache_key() - stmt2 = _generate_statistics_at_time_stmt(StatisticsShortTerm, {0}, 0.0, set()) + stmt2 = _generate_statistics_at_time_stmt( + StatisticsShortTerm, {0}, 0.0, set(), lateral_join_for_start_time + ) cache_key_2 = stmt2._generate_cache_key() assert cache_key_1 == cache_key_2 stmt3 = _generate_statistics_at_time_stmt( - StatisticsShortTerm, {0}, 0.0, {"sum", "mean"} + StatisticsShortTerm, + {0}, + 0.0, + {"sum", "mean"}, + lateral_join_for_start_time, ) cache_key_3 = stmt3._generate_cache_key() assert cache_key_1 != cache_key_3 +@pytest.mark.skip_on_db_engine(["sqlite", "mysql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.usefixtures("recorder_db_url") +@pytest.mark.freeze_time("2022-10-01 00:00:00+00:00") +async def test_statistics_at_time_uses_lateral_query_with_postgresql( + hass: HomeAssistant, + setup_recorder: None, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test statistics_at_time uses a lateral query with PostgreSQL.""" + await async_wait_recording_done(hass) + assert "Compiling statistics for" not in caplog.text + assert "Statistics already compiled" not in caplog.text + + zero = dt_util.utcnow() + period1 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 00:00:00")) + period2 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 01:00:00")) + period3 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 02:00:00")) + period4 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 03:00:00")) + + 
external_statistics = ( + { + "start": period1, + "last_reset": None, + "state": 0, + "sum": 2, + }, + { + "start": period2, + "last_reset": None, + "state": 1, + "sum": 3, + }, + { + "start": period3, + "last_reset": None, + "state": 2, + "sum": 5, + }, + { + "start": period4, + "last_reset": None, + "state": 3, + "sum": 8, + }, + ) + external_metadata = { + "has_mean": False, + "has_sum": True, + "name": "Total imported energy", + "source": "recorder", + "statistic_id": "sensor.total_energy_import", + "unit_of_measurement": "kWh", + } + + async_import_statistics(hass, external_metadata, external_statistics) + await async_wait_recording_done(hass) + # Get change from far in the past + stats = statistics_during_period( + hass, + zero, + period="hour", + statistic_ids={"sensor.total_energy_import"}, + types={"change", "sum"}, + ) + assert stats + sqlalchemy_logs = "".join( + [ + record.getMessage() + for record in caplog.records + if record.name.startswith("sqlalchemy.engine") + ] + ) + # We can't patch inside the lambda so we have to check the logs + assert "JOIN LATERAL" in sqlalchemy_logs + + +@pytest.mark.skip_on_db_engine(["postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.usefixtures("recorder_db_url") +@pytest.mark.freeze_time("2022-10-01 00:00:00+00:00") +async def test_statistics_at_time_uses_non_lateral_query_without_postgresql( + hass: HomeAssistant, + setup_recorder: None, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test statistics_at_time does not use a lateral query without PostgreSQL.""" + await async_wait_recording_done(hass) + assert "Compiling statistics for" not in caplog.text + assert "Statistics already compiled" not in caplog.text + + zero = dt_util.utcnow() + period1 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 00:00:00")) + period2 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 01:00:00")) + period3 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 02:00:00")) + period4 = 
dt_util.as_utc(dt_util.parse_datetime("2023-05-08 03:00:00")) + + external_statistics = ( + { + "start": period1, + "last_reset": None, + "state": 0, + "sum": 2, + }, + { + "start": period2, + "last_reset": None, + "state": 1, + "sum": 3, + }, + { + "start": period3, + "last_reset": None, + "state": 2, + "sum": 5, + }, + { + "start": period4, + "last_reset": None, + "state": 3, + "sum": 8, + }, + ) + external_metadata = { + "has_mean": False, + "has_sum": True, + "name": "Total imported energy", + "source": "recorder", + "statistic_id": "sensor.total_energy_import", + "unit_of_measurement": "kWh", + } + + async_import_statistics(hass, external_metadata, external_statistics) + await async_wait_recording_done(hass) + # Get change from far in the past + stats = statistics_during_period( + hass, + zero, + period="hour", + statistic_ids={"sensor.total_energy_import"}, + types={"change", "sum"}, + ) + assert stats + sqlalchemy_logs = "".join( + [ + record.getMessage() + for record in caplog.records + if record.name.startswith("sqlalchemy.engine") + ] + ) + # We can't patch inside the lambda so we have to check the logs + assert "JOIN LATERAL" not in sqlalchemy_logs + + @pytest.mark.parametrize("timezone", ["America/Regina", "Europe/Vienna", "UTC"]) @pytest.mark.freeze_time("2022-10-01 00:00:00+00:00") async def test_change( From 681863f80ede507acec069e92c5f6a4dd9c91d44 Mon Sep 17 00:00:00 2001 From: Stefan Agner Date: Thu, 19 Dec 2024 08:32:46 +0100 Subject: [PATCH 458/677] Use mV and mA as units for electrical power measurement in Matter (#133505) --- homeassistant/components/matter/sensor.py | 12 ++++---- .../matter/snapshots/test_sensor.ambr | 28 +++++++++++++++++-- 2 files changed, 32 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/matter/sensor.py b/homeassistant/components/matter/sensor.py index d71cd52a0c6..de4fdfe2685 100644 --- a/homeassistant/components/matter/sensor.py +++ b/homeassistant/components/matter/sensor.py @@ -222,10 +222,10 @@ 
DISCOVERY_SCHEMAS = [ platform=Platform.SENSOR, entity_description=MatterSensorEntityDescription( key="PowerSourceBatVoltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, + native_unit_of_measurement=UnitOfElectricPotential.MILLIVOLT, + suggested_unit_of_measurement=UnitOfElectricPotential.VOLT, device_class=SensorDeviceClass.VOLTAGE, entity_category=EntityCategory.DIAGNOSTIC, - measurement_to_ha=lambda x: x / 1000, state_class=SensorStateClass.MEASUREMENT, ), entity_class=MatterSensor, @@ -596,10 +596,10 @@ DISCOVERY_SCHEMAS = [ key="ElectricalPowerMeasurementVoltage", device_class=SensorDeviceClass.VOLTAGE, entity_category=EntityCategory.DIAGNOSTIC, - native_unit_of_measurement=UnitOfElectricPotential.VOLT, + native_unit_of_measurement=UnitOfElectricPotential.MILLIVOLT, + suggested_unit_of_measurement=UnitOfElectricPotential.VOLT, suggested_display_precision=0, state_class=SensorStateClass.MEASUREMENT, - measurement_to_ha=lambda x: x / 1000, ), entity_class=MatterSensor, required_attributes=(clusters.ElectricalPowerMeasurement.Attributes.Voltage,), @@ -610,10 +610,10 @@ DISCOVERY_SCHEMAS = [ key="ElectricalPowerMeasurementActiveCurrent", device_class=SensorDeviceClass.CURRENT, entity_category=EntityCategory.DIAGNOSTIC, - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + native_unit_of_measurement=UnitOfElectricCurrent.MILLIAMPERE, + suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, suggested_display_precision=2, state_class=SensorStateClass.MEASUREMENT, - measurement_to_ha=lambda x: x / 1000, ), entity_class=MatterSensor, required_attributes=( diff --git a/tests/components/matter/snapshots/test_sensor.ambr b/tests/components/matter/snapshots/test_sensor.ambr index 60a3d33a130..e452ce45f1d 100644 --- a/tests/components/matter/snapshots/test_sensor.ambr +++ b/tests/components/matter/snapshots/test_sensor.ambr @@ -1357,6 +1357,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + 
}), }), 'original_device_class': , 'original_icon': None, @@ -1627,6 +1630,9 @@ 'sensor': dict({ 'suggested_display_precision': 2, }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -1792,6 +1798,9 @@ 'sensor': dict({ 'suggested_display_precision': 0, }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -1987,6 +1996,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -2012,7 +2024,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '3.05', + 'state': '3.050', }) # --- # name: test_sensors[eve_weather_sensor][sensor.eve_weather_battery-entry] @@ -2291,6 +2303,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -2659,6 +2674,9 @@ 'sensor': dict({ 'suggested_display_precision': 2, }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -2886,6 +2904,9 @@ 'sensor': dict({ 'suggested_display_precision': 0, }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -3034,6 +3055,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -3059,7 +3083,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.0', + 'state': '0.000', }) # --- # name: test_sensors[temperature_sensor][sensor.mock_temperature_sensor_temperature-entry] From ddd2ba6c4af0b6a8529330fc28ae29213eef316b Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 19 Dec 2024 08:36:29 +0100 Subject: 
[PATCH 459/677] Set default min/max color temperature in hue lights (#133548) --- homeassistant/components/hue/v1/light.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/hue/v1/light.py b/homeassistant/components/hue/v1/light.py index 78a06784b8d..e9669d226f0 100644 --- a/homeassistant/components/hue/v1/light.py +++ b/homeassistant/components/hue/v1/light.py @@ -17,6 +17,8 @@ from homeassistant.components.light import ( ATTR_FLASH, ATTR_HS_COLOR, ATTR_TRANSITION, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, EFFECT_COLORLOOP, EFFECT_RANDOM, FLASH_LONG, @@ -447,13 +449,13 @@ class HueLight(CoordinatorEntity, LightEntity): def max_color_temp_kelvin(self) -> int: """Return the coldest color_temp_kelvin that this light supports.""" if self.is_group: - return super().max_color_temp_kelvin + return DEFAULT_MAX_KELVIN min_mireds = self.light.controlcapabilities.get("ct", {}).get("min") # We filter out '0' too, which can be incorrectly reported by 3rd party buls if not min_mireds: - return super().max_color_temp_kelvin + return DEFAULT_MAX_KELVIN return color_util.color_temperature_mired_to_kelvin(min_mireds) @@ -461,14 +463,14 @@ class HueLight(CoordinatorEntity, LightEntity): def min_color_temp_kelvin(self) -> int: """Return the warmest color_temp_kelvin that this light supports.""" if self.is_group: - return super().min_color_temp_kelvin + return DEFAULT_MIN_KELVIN if self.is_livarno: - return 500 + return 2000 # 500 mireds max_mireds = self.light.controlcapabilities.get("ct", {}).get("max") if not max_mireds: - return super().min_color_temp_kelvin + return DEFAULT_MIN_KELVIN return color_util.color_temperature_mired_to_kelvin(max_mireds) From 893f605d61751f9b9bb1b0c478d9b13abb3dc8dc Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 19 Dec 2024 09:42:22 +0100 Subject: [PATCH 460/677] Revert "Update docker base image to 2024.12.1" (#133552) Revert "Update docker base image to 2024.12.1 (#133323)" This reverts 
commit 66dcd38701283e9e04d7eaa8257ad1d94448f6a6. --- build.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/build.yaml b/build.yaml index fafdd876f75..a8755bbbf5c 100644 --- a/build.yaml +++ b/build.yaml @@ -1,10 +1,10 @@ image: ghcr.io/home-assistant/{arch}-homeassistant build_from: - aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.12.1 - armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.12.1 - armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.12.1 - amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.12.1 - i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.12.1 + aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.11.0 + armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.11.0 + armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.11.0 + amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.11.0 + i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.11.0 codenotary: signer: notary@home-assistant.io base_image: notary@home-assistant.io From c8480627ca40d0d52d1f4f590fb7b99668d35eb5 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 19 Dec 2024 09:56:32 +0100 Subject: [PATCH 461/677] Add comment motivating magic number for MySQL error codes (#133516) * Add comment motivating magic number for MySQL error codes * Pick nits --- homeassistant/components/recorder/migration.py | 2 +- homeassistant/components/recorder/util.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 33790ec65b2..d57db03f90e 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -175,7 +175,7 @@ def _unindexable_legacy_column( ) -> bool: """Ignore index errors on char(0) columns.""" # The error code is hard coded because the PyMySQL library may not be - # installed when using other database 
engines than MySQL or MariaDB. + # installed when using database engines other than MySQL or MariaDB. # 1167: The used storage engine can't index column '%s' return bool( base == LegacyBase diff --git a/homeassistant/components/recorder/util.py b/homeassistant/components/recorder/util.py index 2e7ac0c092d..ba4c5194689 100644 --- a/homeassistant/components/recorder/util.py +++ b/homeassistant/components/recorder/util.py @@ -107,6 +107,8 @@ MAX_RESTART_TIME = timedelta(minutes=10) # Retry when one of the following MySQL errors occurred: RETRYABLE_MYSQL_ERRORS = (1205, 1206, 1213) +# The error codes are hard coded because the PyMySQL library may not be +# installed when using database engines other than MySQL or MariaDB. # 1205: Lock wait timeout exceeded; try restarting transaction # 1206: The total number of locks exceeds the lock table size # 1213: Deadlock found when trying to get lock; try restarting transaction From a3fb6e8f927ea788932a66141983dd3a357d0617 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 18 Dec 2024 23:01:40 -1000 Subject: [PATCH 462/677] Bump pydantic to 2.10.4 (#133539) changelog: https://github.com/pydantic/pydantic/compare/v2.10.3...v2.10.4 --- homeassistant/package_constraints.txt | 2 +- requirements_test.txt | 2 +- script/gen_requirements_all.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 49a6841d3a1..62de8720278 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -126,7 +126,7 @@ multidict>=6.0.2 backoff>=2.0 # ensure pydantic version does not float since it might have breaking changes -pydantic==2.10.3 +pydantic==2.10.4 # Required for Python 3.12.4 compatibility (#119223). 
mashumaro>=3.13.1 diff --git a/requirements_test.txt b/requirements_test.txt index 98a948cd56e..e8561eba0a5 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -14,7 +14,7 @@ license-expression==30.4.0 mock-open==1.4.0 mypy-dev==1.14.0a7 pre-commit==4.0.0 -pydantic==2.10.3 +pydantic==2.10.4 pylint==3.3.2 pylint-per-file-ignores==1.3.2 pipdeptree==2.23.4 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 5cc609eec2a..71229d0b57d 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -159,7 +159,7 @@ multidict>=6.0.2 backoff>=2.0 # ensure pydantic version does not float since it might have breaking changes -pydantic==2.10.3 +pydantic==2.10.4 # Required for Python 3.12.4 compatibility (#119223). mashumaro>=3.13.1 From 69a8d3f3c1d502177ffff7ac496046c2e14b64a6 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 18 Dec 2024 23:01:58 -1000 Subject: [PATCH 463/677] Revert "Optimize start time state queries for PostgreSQL" (#133555) --- .../components/recorder/history/modern.py | 76 ++------ .../components/recorder/statistics.py | 61 ++---- tests/components/recorder/test_history.py | 124 ------------- tests/components/recorder/test_statistics.py | 173 +----------------- 4 files changed, 34 insertions(+), 400 deletions(-) diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index 279ca9c9eea..9159bbc6181 100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -27,13 +27,8 @@ from homeassistant.core import HomeAssistant, State, split_entity_id from homeassistant.helpers.recorder import get_instance import homeassistant.util.dt as dt_util -from ..const import LAST_REPORTED_SCHEMA_VERSION, SupportedDialect -from ..db_schema import ( - SHARED_ATTR_OR_LEGACY_ATTRIBUTES, - StateAttributes, - States, - StatesMeta, -) +from ..const import LAST_REPORTED_SCHEMA_VERSION 
+from ..db_schema import SHARED_ATTR_OR_LEGACY_ATTRIBUTES, StateAttributes, States from ..filters import Filters from ..models import ( LazyState, @@ -150,7 +145,6 @@ def _significant_states_stmt( no_attributes: bool, include_start_time_state: bool, run_start_ts: float | None, - lateral_join_for_start_time: bool, ) -> Select | CompoundSelect: """Query the database for significant state changes.""" include_last_changed = not significant_changes_only @@ -190,7 +184,6 @@ def _significant_states_stmt( metadata_ids, no_attributes, include_last_changed, - lateral_join_for_start_time, ).subquery(), no_attributes, include_last_changed, @@ -261,7 +254,6 @@ def get_significant_states_with_session( start_time_ts = start_time.timestamp() end_time_ts = datetime_to_timestamp_or_none(end_time) single_metadata_id = metadata_ids[0] if len(metadata_ids) == 1 else None - lateral_join_for_start_time = instance.dialect_name == SupportedDialect.POSTGRESQL stmt = lambda_stmt( lambda: _significant_states_stmt( start_time_ts, @@ -273,7 +265,6 @@ def get_significant_states_with_session( no_attributes, include_start_time_state, run_start_ts, - lateral_join_for_start_time, ), track_on=[ bool(single_metadata_id), @@ -565,61 +556,30 @@ def _get_start_time_state_for_entities_stmt( metadata_ids: list[int], no_attributes: bool, include_last_changed: bool, - lateral_join_for_start_time: bool, ) -> Select: """Baked query to get states for specific entities.""" # We got an include-list of entities, accelerate the query by filtering already # in the inner and the outer query. - if lateral_join_for_start_time: - # PostgreSQL does not support index skip scan/loose index scan - # https://wiki.postgresql.org/wiki/Loose_indexscan - # so we need to do a lateral join to get the max last_updated_ts - # for each metadata_id as a group-by is too slow. 
- # https://github.com/home-assistant/core/issues/132865 - max_metadata_id = StatesMeta.metadata_id.label("max_metadata_id") - max_last_updated = ( - select(func.max(States.last_updated_ts)) - .where( - (States.metadata_id == max_metadata_id) - & (States.last_updated_ts >= run_start_ts) - & (States.last_updated_ts < epoch_time) - ) - .subquery() - .lateral() - ) - most_recent_states_for_entities_by_date = ( - select(max_metadata_id, max_last_updated.c[0].label("max_last_updated")) - .select_from(StatesMeta) - .join( - max_last_updated, - StatesMeta.metadata_id == max_metadata_id, - ) - .where(StatesMeta.metadata_id.in_(metadata_ids)) - ).subquery() - else: - # Simple group-by for MySQL and SQLite, must use less - # than 1000 metadata_ids in the IN clause for MySQL - # or it will optimize poorly. - most_recent_states_for_entities_by_date = ( - select( - States.metadata_id.label("max_metadata_id"), - func.max(States.last_updated_ts).label("max_last_updated"), - ) - .filter( - (States.last_updated_ts >= run_start_ts) - & (States.last_updated_ts < epoch_time) - & States.metadata_id.in_(metadata_ids) - ) - .group_by(States.metadata_id) - .subquery() - ) - stmt = ( _stmt_and_join_attributes_for_start_state( no_attributes, include_last_changed, False ) .join( - most_recent_states_for_entities_by_date, + ( + most_recent_states_for_entities_by_date := ( + select( + States.metadata_id.label("max_metadata_id"), + func.max(States.last_updated_ts).label("max_last_updated"), + ) + .filter( + (States.last_updated_ts >= run_start_ts) + & (States.last_updated_ts < epoch_time) + & States.metadata_id.in_(metadata_ids) + ) + .group_by(States.metadata_id) + .subquery() + ) + ), and_( States.metadata_id == most_recent_states_for_entities_by_date.c.max_metadata_id, @@ -661,7 +621,6 @@ def _get_start_time_state_stmt( metadata_ids: list[int], no_attributes: bool, include_last_changed: bool, - lateral_join_for_start_time: bool, ) -> Select: """Return the states at a specific point in 
time.""" if single_metadata_id: @@ -682,7 +641,6 @@ def _get_start_time_state_stmt( metadata_ids, no_attributes, include_last_changed, - lateral_join_for_start_time, ) diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py index 9e47ca43c5b..3f1d5b981e3 100644 --- a/homeassistant/components/recorder/statistics.py +++ b/homeassistant/components/recorder/statistics.py @@ -63,7 +63,6 @@ from .db_schema import ( STATISTICS_TABLES, Statistics, StatisticsBase, - StatisticsMeta, StatisticsRuns, StatisticsShortTerm, ) @@ -1670,7 +1669,6 @@ def _augment_result_with_change( drop_sum = "sum" not in _types prev_sums = {} if tmp := _statistics_at_time( - hass, session, {metadata[statistic_id][0] for statistic_id in result}, table, @@ -2034,50 +2032,22 @@ def _generate_statistics_at_time_stmt( metadata_ids: set[int], start_time_ts: float, types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]], - lateral_join_for_start_time: bool, ) -> StatementLambdaElement: """Create the statement for finding the statistics for a given time.""" stmt = _generate_select_columns_for_types_stmt(table, types) - if lateral_join_for_start_time: - # PostgreSQL does not support index skip scan/loose index scan - # https://wiki.postgresql.org/wiki/Loose_indexscan - # so we need to do a lateral join to get the max max_start_ts - # for each metadata_id as a group-by is too slow. 
- # https://github.com/home-assistant/core/issues/132865 - max_metadata_id = StatisticsMeta.id.label("max_metadata_id") - max_start = ( - select(func.max(table.start_ts)) - .filter(table.metadata_id == max_metadata_id) - .filter(table.start_ts < start_time_ts) - .filter(table.metadata_id.in_(metadata_ids)) - .subquery() - .lateral() - ) - most_recent_statistic_ids = ( - select(max_metadata_id, max_start.c[0].label("max_start_ts")) - .select_from(StatisticsMeta) - .join( - max_start, - StatisticsMeta.id == max_metadata_id, - ) - .where(StatisticsMeta.id.in_(metadata_ids)) - ).subquery() - else: - # Simple group-by for MySQL and SQLite, must use less - # than 1000 metadata_ids in the IN clause for MySQL - # or it will optimize poorly. - most_recent_statistic_ids = ( - select( - func.max(table.start_ts).label("max_start_ts"), - table.metadata_id.label("max_metadata_id"), - ) - .filter(table.start_ts < start_time_ts) - .filter(table.metadata_id.in_(metadata_ids)) - .group_by(table.metadata_id) - .subquery() - ) stmt += lambda q: q.join( - most_recent_statistic_ids, + ( + most_recent_statistic_ids := ( + select( + func.max(table.start_ts).label("max_start_ts"), + table.metadata_id.label("max_metadata_id"), + ) + .filter(table.start_ts < start_time_ts) + .filter(table.metadata_id.in_(metadata_ids)) + .group_by(table.metadata_id) + .subquery() + ) + ), and_( table.start_ts == most_recent_statistic_ids.c.max_start_ts, table.metadata_id == most_recent_statistic_ids.c.max_metadata_id, @@ -2087,7 +2057,6 @@ def _generate_statistics_at_time_stmt( def _statistics_at_time( - hass: HomeAssistant, session: Session, metadata_ids: set[int], table: type[StatisticsBase], @@ -2096,11 +2065,7 @@ def _statistics_at_time( ) -> Sequence[Row] | None: """Return last known statistics, earlier than start_time, for the metadata_ids.""" start_time_ts = start_time.timestamp() - dialect_name = get_instance(hass).dialect_name - lateral_join_for_start_time = dialect_name == 
SupportedDialect.POSTGRESQL - stmt = _generate_statistics_at_time_stmt( - table, metadata_ids, start_time_ts, types, lateral_join_for_start_time - ) + stmt = _generate_statistics_at_time_stmt(table, metadata_ids, start_time_ts, types) return cast(Sequence[Row], execute_stmt_lambda_element(session, stmt)) diff --git a/tests/components/recorder/test_history.py b/tests/components/recorder/test_history.py index eea4605039b..28b8275247c 100644 --- a/tests/components/recorder/test_history.py +++ b/tests/components/recorder/test_history.py @@ -1014,127 +1014,3 @@ async def test_get_last_state_changes_with_non_existent_entity_ids_returns_empty ) -> None: """Test get_last_state_changes returns an empty dict when entities not in the db.""" assert history.get_last_state_changes(hass, 1, "nonexistent.entity") == {} - - -@pytest.mark.skip_on_db_engine(["sqlite", "mysql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -@pytest.mark.usefixtures("recorder_db_url") -async def test_get_significant_states_with_session_uses_lateral_with_postgresql( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test get_significant_states_with_session uses the lateral path with PostgreSQL.""" - entity_id = "media_player.test" - hass.states.async_set("any.other", "on") - await async_wait_recording_done(hass) - hass.states.async_set(entity_id, "off") - - def set_state(state): - """Set the state.""" - hass.states.async_set(entity_id, state, {"any": 1}) - return hass.states.get(entity_id) - - start = dt_util.utcnow().replace(microsecond=0) - point = start + timedelta(seconds=1) - point2 = start + timedelta(seconds=1, microseconds=100) - point3 = start + timedelta(seconds=1, microseconds=200) - end = point + timedelta(seconds=1, microseconds=400) - - with freeze_time(start) as freezer: - set_state("idle") - set_state("YouTube") - - freezer.move_to(point) - states = [set_state("idle")] - - freezer.move_to(point2) - states.append(set_state("Netflix")) - - freezer.move_to(point3) - 
states.append(set_state("Plex")) - - freezer.move_to(end) - set_state("Netflix") - set_state("Plex") - await async_wait_recording_done(hass) - - start_time = point2 + timedelta(microseconds=10) - hist = history.get_significant_states( - hass=hass, - start_time=start_time, # Pick a point where we will generate a start time state - end_time=end, - entity_ids=[entity_id, "any.other"], - include_start_time_state=True, - ) - assert len(hist[entity_id]) == 2 - - sqlalchemy_logs = "".join( - [ - record.getMessage() - for record in caplog.records - if record.name.startswith("sqlalchemy.engine") - ] - ) - # We can't patch inside the lambda so we have to check the logs - assert "JOIN LATERAL" in sqlalchemy_logs - - -@pytest.mark.skip_on_db_engine(["postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -@pytest.mark.usefixtures("recorder_db_url") -async def test_get_significant_states_with_session_uses_non_lateral_without_postgresql( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test get_significant_states_with_session does not use a the lateral path without PostgreSQL.""" - entity_id = "media_player.test" - hass.states.async_set("any.other", "on") - await async_wait_recording_done(hass) - hass.states.async_set(entity_id, "off") - - def set_state(state): - """Set the state.""" - hass.states.async_set(entity_id, state, {"any": 1}) - return hass.states.get(entity_id) - - start = dt_util.utcnow().replace(microsecond=0) - point = start + timedelta(seconds=1) - point2 = start + timedelta(seconds=1, microseconds=100) - point3 = start + timedelta(seconds=1, microseconds=200) - end = point + timedelta(seconds=1, microseconds=400) - - with freeze_time(start) as freezer: - set_state("idle") - set_state("YouTube") - - freezer.move_to(point) - states = [set_state("idle")] - - freezer.move_to(point2) - states.append(set_state("Netflix")) - - freezer.move_to(point3) - states.append(set_state("Plex")) - - freezer.move_to(end) - set_state("Netflix") - 
set_state("Plex") - await async_wait_recording_done(hass) - - start_time = point2 + timedelta(microseconds=10) - hist = history.get_significant_states( - hass=hass, - start_time=start_time, # Pick a point where we will generate a start time state - end_time=end, - entity_ids=[entity_id, "any.other"], - include_start_time_state=True, - ) - assert len(hist[entity_id]) == 2 - - sqlalchemy_logs = "".join( - [ - record.getMessage() - for record in caplog.records - if record.name.startswith("sqlalchemy.engine") - ] - ) - # We can't patch inside the lambda so we have to check the logs - assert "JOIN LATERAL" not in sqlalchemy_logs diff --git a/tests/components/recorder/test_statistics.py b/tests/components/recorder/test_statistics.py index 55029c3eacf..6b1e1a655db 100644 --- a/tests/components/recorder/test_statistics.py +++ b/tests/components/recorder/test_statistics.py @@ -1914,185 +1914,20 @@ def test_cache_key_for_generate_max_mean_min_statistic_in_sub_period_stmt() -> N assert cache_key_1 != cache_key_3 -@pytest.mark.parametrize("lateral_join_for_start_time", [True, False]) -def test_cache_key_for_generate_statistics_at_time_stmt( - lateral_join_for_start_time: bool, -) -> None: +def test_cache_key_for_generate_statistics_at_time_stmt() -> None: """Test cache key for _generate_statistics_at_time_stmt.""" - stmt = _generate_statistics_at_time_stmt( - StatisticsShortTerm, {0}, 0.0, set(), lateral_join_for_start_time - ) + stmt = _generate_statistics_at_time_stmt(StatisticsShortTerm, {0}, 0.0, set()) cache_key_1 = stmt._generate_cache_key() - stmt2 = _generate_statistics_at_time_stmt( - StatisticsShortTerm, {0}, 0.0, set(), lateral_join_for_start_time - ) + stmt2 = _generate_statistics_at_time_stmt(StatisticsShortTerm, {0}, 0.0, set()) cache_key_2 = stmt2._generate_cache_key() assert cache_key_1 == cache_key_2 stmt3 = _generate_statistics_at_time_stmt( - StatisticsShortTerm, - {0}, - 0.0, - {"sum", "mean"}, - lateral_join_for_start_time, + StatisticsShortTerm, {0}, 0.0, 
{"sum", "mean"} ) cache_key_3 = stmt3._generate_cache_key() assert cache_key_1 != cache_key_3 -@pytest.mark.skip_on_db_engine(["sqlite", "mysql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -@pytest.mark.usefixtures("recorder_db_url") -@pytest.mark.freeze_time("2022-10-01 00:00:00+00:00") -async def test_statistics_at_time_uses_lateral_query_with_postgresql( - hass: HomeAssistant, - setup_recorder: None, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test statistics_at_time uses a lateral query with PostgreSQL.""" - await async_wait_recording_done(hass) - assert "Compiling statistics for" not in caplog.text - assert "Statistics already compiled" not in caplog.text - - zero = dt_util.utcnow() - period1 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 00:00:00")) - period2 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 01:00:00")) - period3 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 02:00:00")) - period4 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 03:00:00")) - - external_statistics = ( - { - "start": period1, - "last_reset": None, - "state": 0, - "sum": 2, - }, - { - "start": period2, - "last_reset": None, - "state": 1, - "sum": 3, - }, - { - "start": period3, - "last_reset": None, - "state": 2, - "sum": 5, - }, - { - "start": period4, - "last_reset": None, - "state": 3, - "sum": 8, - }, - ) - external_metadata = { - "has_mean": False, - "has_sum": True, - "name": "Total imported energy", - "source": "recorder", - "statistic_id": "sensor.total_energy_import", - "unit_of_measurement": "kWh", - } - - async_import_statistics(hass, external_metadata, external_statistics) - await async_wait_recording_done(hass) - # Get change from far in the past - stats = statistics_during_period( - hass, - zero, - period="hour", - statistic_ids={"sensor.total_energy_import"}, - types={"change", "sum"}, - ) - assert stats - sqlalchemy_logs = "".join( - [ - record.getMessage() - for record in caplog.records - if 
record.name.startswith("sqlalchemy.engine") - ] - ) - # We can't patch inside the lambda so we have to check the logs - assert "JOIN LATERAL" in sqlalchemy_logs - - -@pytest.mark.skip_on_db_engine(["postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") -@pytest.mark.usefixtures("recorder_db_url") -@pytest.mark.freeze_time("2022-10-01 00:00:00+00:00") -async def test_statistics_at_time_uses_non_lateral_query_without_postgresql( - hass: HomeAssistant, - setup_recorder: None, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test statistics_at_time does not use a lateral query without PostgreSQL.""" - await async_wait_recording_done(hass) - assert "Compiling statistics for" not in caplog.text - assert "Statistics already compiled" not in caplog.text - - zero = dt_util.utcnow() - period1 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 00:00:00")) - period2 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 01:00:00")) - period3 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 02:00:00")) - period4 = dt_util.as_utc(dt_util.parse_datetime("2023-05-08 03:00:00")) - - external_statistics = ( - { - "start": period1, - "last_reset": None, - "state": 0, - "sum": 2, - }, - { - "start": period2, - "last_reset": None, - "state": 1, - "sum": 3, - }, - { - "start": period3, - "last_reset": None, - "state": 2, - "sum": 5, - }, - { - "start": period4, - "last_reset": None, - "state": 3, - "sum": 8, - }, - ) - external_metadata = { - "has_mean": False, - "has_sum": True, - "name": "Total imported energy", - "source": "recorder", - "statistic_id": "sensor.total_energy_import", - "unit_of_measurement": "kWh", - } - - async_import_statistics(hass, external_metadata, external_statistics) - await async_wait_recording_done(hass) - # Get change from far in the past - stats = statistics_during_period( - hass, - zero, - period="hour", - statistic_ids={"sensor.total_energy_import"}, - types={"change", "sum"}, - ) - assert stats - sqlalchemy_logs = "".join( - [ - 
record.getMessage() - for record in caplog.records - if record.name.startswith("sqlalchemy.engine") - ] - ) - # We can't patch inside the lambda so we have to check the logs - assert "JOIN LATERAL" not in sqlalchemy_logs - - @pytest.mark.parametrize("timezone", ["America/Regina", "Europe/Vienna", "UTC"]) @pytest.mark.freeze_time("2022-10-01 00:00:00+00:00") async def test_change( From cd384cadbef19cc23987f5994c30a8ee69d52a15 Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Thu, 19 Dec 2024 10:04:26 +0100 Subject: [PATCH 464/677] Fulfill IQS rule config-flow in ViCare integration (#133524) * add data_description * Apply suggestions from code review Co-authored-by: Josef Zweck --------- Co-authored-by: Josef Zweck --- homeassistant/components/vicare/quality_scale.yaml | 4 +--- homeassistant/components/vicare/strings.json | 10 ++++++++++ 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/vicare/quality_scale.yaml b/homeassistant/components/vicare/quality_scale.yaml index 436e046204f..959e2e90583 100644 --- a/homeassistant/components/vicare/quality_scale.yaml +++ b/homeassistant/components/vicare/quality_scale.yaml @@ -1,8 +1,6 @@ rules: # Bronze - config-flow: - status: todo - comment: data_description is missing. 
+ config-flow: done test-before-configure: done unique-config-entry: status: todo diff --git a/homeassistant/components/vicare/strings.json b/homeassistant/components/vicare/strings.json index 77e570da779..4934507e41c 100644 --- a/homeassistant/components/vicare/strings.json +++ b/homeassistant/components/vicare/strings.json @@ -9,6 +9,12 @@ "password": "[%key:common::config_flow::data::password%]", "client_id": "Client ID", "heating_type": "Heating type" + }, + "data_description": { + "username": "The email address to login to your ViCare account.", + "password": "The password to login to your ViCare account.", + "client_id": "The ID of the API client created in the Viessmann developer portal.", + "heating_type": "Allows to overrule the device auto detection." } }, "reauth_confirm": { @@ -16,6 +22,10 @@ "data": { "password": "[%key:common::config_flow::data::password%]", "client_id": "[%key:component::vicare::config::step::user::data::client_id%]" + }, + "data_description": { + "password": "[%key:component::vicare::config::step::user::data_description::password%]", + "client_id": "[%key:component::vicare::config::step::user::data_description::client_id%]" } } }, From a76f82080bd7ebabb8f502f2c71e6141efa1ac17 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 19 Dec 2024 10:40:07 +0100 Subject: [PATCH 465/677] Create repair issues when automatic backup fails (#133513) * Create repair issues when automatic backup fails * Improve test coverage * Adjust issues --- homeassistant/components/backup/manager.py | 43 +++- homeassistant/components/backup/strings.json | 10 + tests/components/backup/test_manager.py | 209 +++++++++++++++++++ 3 files changed, 261 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index 99373b1942a..4a0b8553f1c 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -23,7 +23,11 @@ from 
homeassistant.backup_restore import RESTORE_BACKUP_FILE, password_to_key from homeassistant.const import __version__ as HAVERSION from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import instance_id, integration_platform +from homeassistant.helpers import ( + instance_id, + integration_platform, + issue_registry as ir, +) from homeassistant.helpers.json import json_bytes from homeassistant.util import dt as dt_util @@ -691,6 +695,8 @@ class BackupManager: CreateBackupEvent(stage=None, state=CreateBackupState.FAILED) ) self.async_on_backup_event(IdleEvent()) + if with_automatic_settings: + self._update_issue_backup_failed() raise async def _async_create_backup( @@ -750,6 +756,8 @@ class BackupManager: self.async_on_backup_event( CreateBackupEvent(stage=None, state=CreateBackupState.FAILED) ) + if with_automatic_settings: + self._update_issue_backup_failed() else: LOGGER.debug( "Generated new backup with backup_id %s, uploading to agents %s", @@ -772,6 +780,7 @@ class BackupManager: # create backup was successful, update last_completed_automatic_backup self.config.data.last_completed_automatic_backup = dt_util.now() self.store.save() + self._update_issue_after_agent_upload(agent_errors) self.known_backups.add(written_backup.backup, agent_errors) # delete old backups more numerous than copies @@ -878,6 +887,38 @@ class BackupManager: self._backup_event_subscriptions.append(on_event) return remove_subscription + def _update_issue_backup_failed(self) -> None: + """Update issue registry when a backup fails.""" + ir.async_create_issue( + self.hass, + DOMAIN, + "automatic_backup_failed", + is_fixable=False, + is_persistent=True, + learn_more_url="homeassistant://config/backup", + severity=ir.IssueSeverity.WARNING, + translation_key="automatic_backup_failed_create", + ) + + def _update_issue_after_agent_upload( + self, agent_errors: dict[str, Exception] + ) -> None: + """Update issue 
registry after a backup is uploaded to agents.""" + if not agent_errors: + ir.async_delete_issue(self.hass, DOMAIN, "automatic_backup_failed") + return + ir.async_create_issue( + self.hass, + DOMAIN, + "automatic_backup_failed", + is_fixable=False, + is_persistent=True, + learn_more_url="homeassistant://config/backup", + severity=ir.IssueSeverity.WARNING, + translation_key="automatic_backup_failed_upload_agents", + translation_placeholders={"failed_agents": ", ".join(agent_errors)}, + ) + class KnownBackups: """Track known backups.""" diff --git a/homeassistant/components/backup/strings.json b/homeassistant/components/backup/strings.json index 6ad3416b1b9..d9de2bff861 100644 --- a/homeassistant/components/backup/strings.json +++ b/homeassistant/components/backup/strings.json @@ -1,4 +1,14 @@ { + "issues": { + "automatic_backup_failed_create": { + "title": "Automatic backup could not be created", + "description": "The automatic backup could not be created. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured." + }, + "automatic_backup_failed_upload_agents": { + "title": "Automatic backup could not be uploaded to agents", + "description": "The automatic backup could not be uploaded to agents {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured." 
+ } + }, "services": { "create": { "name": "Create backup", diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index 5795309501d..e976ad0c099 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -34,6 +34,7 @@ from homeassistant.components.backup.manager import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component from .common import ( @@ -534,6 +535,214 @@ async def test_async_initiate_backup_with_agent_error( ] +@pytest.mark.usefixtures("mock_backup_generation") +@pytest.mark.parametrize( + ("create_backup_command", "issues_after_create_backup"), + [ + ( + {"type": "backup/generate", "agent_ids": [LOCAL_AGENT_ID]}, + {(DOMAIN, "automatic_backup_failed")}, + ), + ( + {"type": "backup/generate_with_automatic_settings"}, + set(), + ), + ], +) +async def test_create_backup_success_clears_issue( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + create_backup_command: dict[str, Any], + issues_after_create_backup: set[tuple[str, str]], +) -> None: + """Test backup issue is cleared after backup is created.""" + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + # Create a backup issue + ir.async_create_issue( + hass, + DOMAIN, + "automatic_backup_failed", + is_fixable=False, + is_persistent=True, + severity=ir.IssueSeverity.WARNING, + translation_key="automatic_backup_failed_create", + ) + + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": [LOCAL_AGENT_ID]}, + } + ) + result = await ws_client.receive_json() + assert result["success"] is True + + await ws_client.send_json_auto_id(create_backup_command) + result = await ws_client.receive_json() + assert result["success"] is 
True + + await hass.async_block_till_done() + + issue_registry = ir.async_get(hass) + assert set(issue_registry.issues) == issues_after_create_backup + + +async def delayed_boom(*args, **kwargs) -> None: + """Raise an exception after a delay.""" + + async def delayed_boom() -> None: + await asyncio.sleep(0) + raise Exception("Boom!") # noqa: TRY002 + + return (NewBackup(backup_job_id="abc123"), delayed_boom()) + + +@pytest.mark.parametrize( + ( + "create_backup_command", + "create_backup_side_effect", + "agent_upload_side_effect", + "create_backup_result", + "issues_after_create_backup", + ), + [ + # No error + ( + {"type": "backup/generate", "agent_ids": ["test.remote"]}, + None, + None, + True, + {}, + ), + ( + {"type": "backup/generate_with_automatic_settings"}, + None, + None, + True, + {}, + ), + # Error raised in async_initiate_backup + ( + {"type": "backup/generate", "agent_ids": ["test.remote"]}, + Exception("Boom!"), + None, + False, + {}, + ), + ( + {"type": "backup/generate_with_automatic_settings"}, + Exception("Boom!"), + None, + False, + { + (DOMAIN, "automatic_backup_failed"): { + "translation_key": "automatic_backup_failed_create", + "translation_placeholders": None, + } + }, + ), + # Error raised when awaiting the backup task + ( + {"type": "backup/generate", "agent_ids": ["test.remote"]}, + delayed_boom, + None, + True, + {}, + ), + ( + {"type": "backup/generate_with_automatic_settings"}, + delayed_boom, + None, + True, + { + (DOMAIN, "automatic_backup_failed"): { + "translation_key": "automatic_backup_failed_create", + "translation_placeholders": None, + } + }, + ), + # Error raised in async_upload_backup + ( + {"type": "backup/generate", "agent_ids": ["test.remote"]}, + None, + Exception("Boom!"), + True, + {}, + ), + ( + {"type": "backup/generate_with_automatic_settings"}, + None, + Exception("Boom!"), + True, + { + (DOMAIN, "automatic_backup_failed"): { + "translation_key": "automatic_backup_failed_upload_agents", + "translation_placeholders": 
{"failed_agents": "test.remote"}, + } + }, + ), + ], +) +async def test_create_backup_failure_raises_issue( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + create_backup: AsyncMock, + create_backup_command: dict[str, Any], + create_backup_side_effect: Exception | None, + agent_upload_side_effect: Exception | None, + create_backup_result: bool, + issues_after_create_backup: dict[tuple[str, str], dict[str, Any]], +) -> None: + """Test backup issue is cleared after backup is created.""" + remote_agent = BackupAgentTest("remote", backups=[]) + + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + + ws_client = await hass_ws_client(hass) + + create_backup.side_effect = create_backup_side_effect + + await ws_client.send_json_auto_id( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.remote"]}, + } + ) + result = await ws_client.receive_json() + assert result["success"] is True + + with patch.object( + remote_agent, "async_upload_backup", side_effect=agent_upload_side_effect + ): + await ws_client.send_json_auto_id(create_backup_command) + result = await ws_client.receive_json() + assert result["success"] == create_backup_result + await hass.async_block_till_done() + + issue_registry = ir.async_get(hass) + assert set(issue_registry.issues) == set(issues_after_create_backup) + for issue_id, issue_data in issues_after_create_backup.items(): + issue = issue_registry.issues[issue_id] + assert issue.translation_key == issue_data["translation_key"] + assert issue.translation_placeholders == issue_data["translation_placeholders"] + + async def test_loading_platforms( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, From 
3568bdca655caa831abf5dfc39a2d3742eb26530 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 19 Dec 2024 10:48:43 +0100 Subject: [PATCH 466/677] Update Home Assistant base image to 2024.12.0 (#133558) --- build.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/build.yaml b/build.yaml index a8755bbbf5c..e6e149cf700 100644 --- a/build.yaml +++ b/build.yaml @@ -1,10 +1,10 @@ image: ghcr.io/home-assistant/{arch}-homeassistant build_from: - aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.11.0 - armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.11.0 - armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.11.0 - amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.11.0 - i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.11.0 + aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.12.0 + armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.12.0 + armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.12.0 + amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.12.0 + i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.12.0 codenotary: signer: notary@home-assistant.io base_image: notary@home-assistant.io From 79484ea7f5564928ab498f8b895465751ef82efe Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Thu, 19 Dec 2024 10:50:12 +0100 Subject: [PATCH 467/677] Grammar fixes for action names and descriptions (#133559) Several KNX actions contain a wrong "s" at the end of their verbs while those are missing in several of the descriptions. This commit changes all those to make them consistent with the remaining actions in KNX and the standard terminology in Home Assistant. 
--- homeassistant/components/knx/strings.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/knx/strings.json b/homeassistant/components/knx/strings.json index 08b921f316b..d697fa79e78 100644 --- a/homeassistant/components/knx/strings.json +++ b/homeassistant/components/knx/strings.json @@ -360,8 +360,8 @@ } }, "read": { - "name": "Reads from KNX bus", - "description": "Send GroupValueRead requests to the KNX bus. Response can be used from `knx_event` and will be processed in KNX entities.", + "name": "Read from KNX bus", + "description": "Sends GroupValueRead requests to the KNX bus. Response can be used from `knx_event` and will be processed in KNX entities.", "fields": { "address": { "name": "[%key:component::knx::services::send::fields::address::name%]", @@ -370,8 +370,8 @@ } }, "event_register": { - "name": "Registers knx_event", - "description": "Add or remove group addresses to knx_event filter for triggering `knx_event`s. Only addresses added with this service can be removed.", + "name": "Register knx_event", + "description": "Adds or removes group addresses to knx_event filter for triggering `knx_event`s. Only addresses added with this service can be removed.", "fields": { "address": { "name": "[%key:component::knx::services::send::fields::address::name%]", @@ -389,7 +389,7 @@ }, "exposure_register": { "name": "Expose to KNX bus", - "description": "Adds or remove exposures to KNX bus. Only exposures added with this service can be removed.", + "description": "Adds or removes exposures to KNX bus. 
Only exposures added with this service can be removed.", "fields": { "address": { "name": "[%key:component::knx::services::send::fields::address::name%]", From 9a6c749714fdfff24af830da0cbea25634d39efc Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Thu, 19 Dec 2024 10:51:30 +0100 Subject: [PATCH 468/677] Change 'GSuite' to 'Workspace', fix 'Start' field label (#133554) * Change 'GSuite' to 'Workspace', fix 'Start' field label Several years ago Google renamed "G Suite" to "Google Workspace", this commit applies the same change to one of the field descriptions of the set_vacation action. In addition the "Start" field of the action currently uses the common action (!) for Start which is wrong in this context, it stands for the beginning here. This commit changes this back to a local definition of this label just like "End". In German for example "Start" needs to be "Beginn" in this context while the common action is translated as "Starten". * Use "Google Workspace" for more clarity Co-authored-by: Joost Lekkerkerker --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/google_mail/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/google_mail/strings.json b/homeassistant/components/google_mail/strings.json index 2c6e24109c3..f93a8581e1c 100644 --- a/homeassistant/components/google_mail/strings.json +++ b/homeassistant/components/google_mail/strings.json @@ -68,10 +68,10 @@ }, "restrict_domain": { "name": "Restrict to domain", - "description": "Restrict automatic reply to domain. This only affects GSuite accounts." + "description": "Restrict automatic reply to domain. This only affects Google Workspace accounts." }, "start": { - "name": "[%key:common::action::start%]", + "name": "Start", "description": "First day of the vacation." 
}, "end": { From 1c119518db79ab73a4338708769920793a9d7265 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 19 Dec 2024 10:52:10 +0100 Subject: [PATCH 469/677] Bump codecov/codecov-action from 5.1.1 to 5.1.2 (#133547) Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 5.1.1 to 5.1.2. - [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/v5.1.1...v5.1.2) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 71924afecc8..98f4fb04e34 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1273,7 +1273,7 @@ jobs: pattern: coverage-* - name: Upload coverage to Codecov if: needs.info.outputs.test_full_suite == 'true' - uses: codecov/codecov-action@v5.1.1 + uses: codecov/codecov-action@v5.1.2 with: fail_ci_if_error: true flags: full-suite @@ -1411,7 +1411,7 @@ jobs: pattern: coverage-* - name: Upload coverage to Codecov if: needs.info.outputs.test_full_suite == 'false' - uses: codecov/codecov-action@v5.1.1 + uses: codecov/codecov-action@v5.1.2 with: fail_ci_if_error: true token: ${{ secrets.CODECOV_TOKEN }} From d35b34f14226975cbf042cb0f8ed602d28b00b74 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 19 Dec 2024 00:14:32 -1000 Subject: [PATCH 470/677] Replace start time state query with single correlated scalar subquery (#133553) --- .../components/recorder/history/modern.py | 55 ++++++++++--------- .../components/recorder/statistics.py | 44 +++++++++------ 2 files changed, 57 insertions(+), 42 deletions(-) diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index 9159bbc6181..e9af4a673c3 100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -28,7 +28,12 @@ from homeassistant.helpers.recorder import get_instance import homeassistant.util.dt as dt_util from ..const import LAST_REPORTED_SCHEMA_VERSION -from ..db_schema import SHARED_ATTR_OR_LEGACY_ATTRIBUTES, StateAttributes, States +from ..db_schema import ( + SHARED_ATTR_OR_LEGACY_ATTRIBUTES, + StateAttributes, + States, + StatesMeta, +) from ..filters import Filters from ..models import ( LazyState, @@ -558,40 +563,38 @@ def _get_start_time_state_for_entities_stmt( include_last_changed: bool, ) -> Select: """Baked query to get states for specific entities.""" - # We got an include-list of entities, accelerate the query by filtering already - # in the inner and the outer query. + # This query is the result of significant research in + # https://github.com/home-assistant/core/issues/132865 + # A reverse index scan with a limit 1 is the fastest way to get the + # last state change before a specific point in time for all supported + # databases. Since all databases support this query as a join + # condition we can use it as a subquery to get the last state change + # before a specific point in time for all entities. 
stmt = ( _stmt_and_join_attributes_for_start_state( no_attributes, include_last_changed, False ) + .select_from(StatesMeta) .join( - ( - most_recent_states_for_entities_by_date := ( - select( - States.metadata_id.label("max_metadata_id"), - func.max(States.last_updated_ts).label("max_last_updated"), - ) - .filter( - (States.last_updated_ts >= run_start_ts) - & (States.last_updated_ts < epoch_time) - & States.metadata_id.in_(metadata_ids) - ) - .group_by(States.metadata_id) - .subquery() - ) - ), + States, and_( - States.metadata_id - == most_recent_states_for_entities_by_date.c.max_metadata_id, States.last_updated_ts - == most_recent_states_for_entities_by_date.c.max_last_updated, + == ( + select(States.last_updated_ts) + .where( + (StatesMeta.metadata_id == States.metadata_id) + & (States.last_updated_ts < epoch_time) + & (States.last_updated_ts >= run_start_ts) + ) + .order_by(States.last_updated_ts.desc()) + .limit(1) + ) + .scalar_subquery() + .correlate(StatesMeta), + States.metadata_id == StatesMeta.metadata_id, ), ) - .filter( - (States.last_updated_ts >= run_start_ts) - & (States.last_updated_ts < epoch_time) - & States.metadata_id.in_(metadata_ids) - ) + .where(StatesMeta.metadata_id.in_(metadata_ids)) ) if no_attributes: return stmt diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py index 3f1d5b981e3..c6783a5cbc2 100644 --- a/homeassistant/components/recorder/statistics.py +++ b/homeassistant/components/recorder/statistics.py @@ -63,6 +63,7 @@ from .db_schema import ( STATISTICS_TABLES, Statistics, StatisticsBase, + StatisticsMeta, StatisticsRuns, StatisticsShortTerm, ) @@ -2034,24 +2035,35 @@ def _generate_statistics_at_time_stmt( types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]], ) -> StatementLambdaElement: """Create the statement for finding the statistics for a given time.""" + # This query is the result of significant research in + # 
https://github.com/home-assistant/core/issues/132865 + # A reverse index scan with a limit 1 is the fastest way to get the + # last start_time_ts before a specific point in time for all supported + # databases. Since all databases support this query as a join + # condition we can use it as a subquery to get the last start_time_ts + # before a specific point in time for all entities. stmt = _generate_select_columns_for_types_stmt(table, types) - stmt += lambda q: q.join( - ( - most_recent_statistic_ids := ( - select( - func.max(table.start_ts).label("max_start_ts"), - table.metadata_id.label("max_metadata_id"), + stmt += ( + lambda q: q.select_from(StatisticsMeta) + .join( + table, + and_( + table.start_ts + == ( + select(table.start_ts) + .where( + (StatisticsMeta.id == table.metadata_id) + & (table.start_ts < start_time_ts) + ) + .order_by(table.start_ts.desc()) + .limit(1) ) - .filter(table.start_ts < start_time_ts) - .filter(table.metadata_id.in_(metadata_ids)) - .group_by(table.metadata_id) - .subquery() - ) - ), - and_( - table.start_ts == most_recent_statistic_ids.c.max_start_ts, - table.metadata_id == most_recent_statistic_ids.c.max_metadata_id, - ), + .scalar_subquery() + .correlate(StatisticsMeta), + table.metadata_id == StatisticsMeta.id, + ), + ) + .where(table.metadata_id.in_(metadata_ids)) ) return stmt From bb7abd037c5e7e6c077170e3fa881959b78957e0 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 19 Dec 2024 11:50:12 +0100 Subject: [PATCH 471/677] Revert "Revert "Improve recorder history queries (#131702)"" (#133561) Revert "Revert "Improve recorder history queries (#131702)" (#133203)" This reverts commit 74e4654c26177909e653921f27f838fd1366adc0. 
--- homeassistant/components/history/__init__.py | 7 ++-- homeassistant/components/history/helpers.py | 13 ++++---- .../components/history/websocket_api.py | 7 ++-- homeassistant/components/recorder/core.py | 1 + .../components/recorder/history/legacy.py | 18 +++++------ .../components/recorder/history/modern.py | 31 +++++++++--------- homeassistant/components/recorder/purge.py | 3 ++ homeassistant/components/recorder/queries.py | 9 ++++++ .../recorder/table_managers/states.py | 32 +++++++++++++++++++ homeassistant/components/recorder/tasks.py | 2 -- tests/components/recorder/test_purge.py | 17 ++++++++++ 11 files changed, 102 insertions(+), 38 deletions(-) diff --git a/homeassistant/components/history/__init__.py b/homeassistant/components/history/__init__.py index 365be06fd2d..7241e1fac9a 100644 --- a/homeassistant/components/history/__init__.py +++ b/homeassistant/components/history/__init__.py @@ -22,7 +22,7 @@ import homeassistant.util.dt as dt_util from . import websocket_api from .const import DOMAIN -from .helpers import entities_may_have_state_changes_after, has_recorder_run_after +from .helpers import entities_may_have_state_changes_after, has_states_before CONF_ORDER = "use_include_order" @@ -107,7 +107,10 @@ class HistoryPeriodView(HomeAssistantView): no_attributes = "no_attributes" in request.query if ( - (end_time and not has_recorder_run_after(hass, end_time)) + # has_states_before will return True if there are states older than + # end_time. If it's false, we know there are no states in the + # database up until end_time. 
+ (end_time and not has_states_before(hass, end_time)) or not include_start_time_state and entity_ids and not entities_may_have_state_changes_after( diff --git a/homeassistant/components/history/helpers.py b/homeassistant/components/history/helpers.py index bd477e7e4ed..2010b7373ff 100644 --- a/homeassistant/components/history/helpers.py +++ b/homeassistant/components/history/helpers.py @@ -6,7 +6,6 @@ from collections.abc import Iterable from datetime import datetime as dt from homeassistant.components.recorder import get_instance -from homeassistant.components.recorder.models import process_timestamp from homeassistant.core import HomeAssistant @@ -26,8 +25,10 @@ def entities_may_have_state_changes_after( return False -def has_recorder_run_after(hass: HomeAssistant, run_time: dt) -> bool: - """Check if the recorder has any runs after a specific time.""" - return run_time >= process_timestamp( - get_instance(hass).recorder_runs_manager.first.start - ) +def has_states_before(hass: HomeAssistant, run_time: dt) -> bool: + """Check if the recorder has states as old or older than run_time. + + Returns True if there may be such states. 
+ """ + oldest_ts = get_instance(hass).states_manager.oldest_ts + return oldest_ts is not None and run_time.timestamp() >= oldest_ts diff --git a/homeassistant/components/history/websocket_api.py b/homeassistant/components/history/websocket_api.py index c85d975c3c9..35f8ed5f1ac 100644 --- a/homeassistant/components/history/websocket_api.py +++ b/homeassistant/components/history/websocket_api.py @@ -39,7 +39,7 @@ from homeassistant.util.async_ import create_eager_task import homeassistant.util.dt as dt_util from .const import EVENT_COALESCE_TIME, MAX_PENDING_HISTORY_STATES -from .helpers import entities_may_have_state_changes_after, has_recorder_run_after +from .helpers import entities_may_have_state_changes_after, has_states_before _LOGGER = logging.getLogger(__name__) @@ -142,7 +142,10 @@ async def ws_get_history_during_period( no_attributes = msg["no_attributes"] if ( - (end_time and not has_recorder_run_after(hass, end_time)) + # has_states_before will return True if there are states older than + # end_time. If it's false, we know there are no states in the + # database up until end_time. 
+ (end_time and not has_states_before(hass, end_time)) or not include_start_time_state and entity_ids and not entities_may_have_state_changes_after( diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index 9d9b70586a6..61c64be105c 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -1431,6 +1431,7 @@ class Recorder(threading.Thread): with session_scope(session=self.get_session()) as session: end_incomplete_runs(session, self.recorder_runs_manager.recording_start) self.recorder_runs_manager.start(session) + self.states_manager.load_from_db(session) self._open_event_session() diff --git a/homeassistant/components/recorder/history/legacy.py b/homeassistant/components/recorder/history/legacy.py index da90b296fe3..dc49ebb9768 100644 --- a/homeassistant/components/recorder/history/legacy.py +++ b/homeassistant/components/recorder/history/legacy.py @@ -22,9 +22,9 @@ from homeassistant.core import HomeAssistant, State, split_entity_id from homeassistant.helpers.recorder import get_instance import homeassistant.util.dt as dt_util -from ..db_schema import RecorderRuns, StateAttributes, States +from ..db_schema import StateAttributes, States from ..filters import Filters -from ..models import process_timestamp, process_timestamp_to_utc_isoformat +from ..models import process_timestamp_to_utc_isoformat from ..models.legacy import LegacyLazyState, legacy_row_to_compressed_state from ..util import execute_stmt_lambda_element, session_scope from .const import ( @@ -436,7 +436,7 @@ def get_last_state_changes( def _get_states_for_entities_stmt( - run_start: datetime, + run_start_ts: float, utc_point_in_time: datetime, entity_ids: list[str], no_attributes: bool, @@ -447,7 +447,6 @@ def _get_states_for_entities_stmt( ) # We got an include-list of entities, accelerate the query by filtering already # in the inner query. 
- run_start_ts = process_timestamp(run_start).timestamp() utc_point_in_time_ts = utc_point_in_time.timestamp() stmt += lambda q: q.join( ( @@ -483,7 +482,7 @@ def _get_rows_with_session( session: Session, utc_point_in_time: datetime, entity_ids: list[str], - run: RecorderRuns | None = None, + *, no_attributes: bool = False, ) -> Iterable[Row]: """Return the states at a specific point in time.""" @@ -495,17 +494,16 @@ def _get_rows_with_session( ), ) - if run is None: - run = get_instance(hass).recorder_runs_manager.get(utc_point_in_time) + oldest_ts = get_instance(hass).states_manager.oldest_ts - if run is None or process_timestamp(run.start) > utc_point_in_time: - # History did not run before utc_point_in_time + if oldest_ts is None or oldest_ts > utc_point_in_time.timestamp(): + # We don't have any states for the requested time return [] # We have more than one entity to look at so we need to do a query on states # since the last recorder run started. stmt = _get_states_for_entities_stmt( - run.start, utc_point_in_time, entity_ids, no_attributes + oldest_ts, utc_point_in_time, entity_ids, no_attributes ) return execute_stmt_lambda_element(session, stmt) diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index e9af4a673c3..a8902e184ec 100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -39,7 +39,6 @@ from ..models import ( LazyState, datetime_to_timestamp_or_none, extract_metadata_ids, - process_timestamp, row_to_compressed_state, ) from ..util import execute_stmt_lambda_element, session_scope @@ -251,9 +250,9 @@ def get_significant_states_with_session( if metadata_id is not None and split_entity_id(entity_id)[0] in SIGNIFICANT_DOMAINS ] - run_start_ts: float | None = None + oldest_ts: float | None = None if include_start_time_state and not ( - run_start_ts := _get_run_start_ts_for_utc_point_in_time(hass, start_time) + 
oldest_ts := _get_oldest_possible_ts(hass, start_time) ): include_start_time_state = False start_time_ts = start_time.timestamp() @@ -269,7 +268,7 @@ def get_significant_states_with_session( significant_changes_only, no_attributes, include_start_time_state, - run_start_ts, + oldest_ts, ), track_on=[ bool(single_metadata_id), @@ -416,9 +415,9 @@ def state_changes_during_period( entity_id_to_metadata_id: dict[str, int | None] = { entity_id: single_metadata_id } - run_start_ts: float | None = None + oldest_ts: float | None = None if include_start_time_state and not ( - run_start_ts := _get_run_start_ts_for_utc_point_in_time(hass, start_time) + oldest_ts := _get_oldest_possible_ts(hass, start_time) ): include_start_time_state = False start_time_ts = start_time.timestamp() @@ -431,7 +430,7 @@ def state_changes_during_period( no_attributes, limit, include_start_time_state, - run_start_ts, + oldest_ts, has_last_reported, ), track_on=[ @@ -603,17 +602,17 @@ def _get_start_time_state_for_entities_stmt( ) -def _get_run_start_ts_for_utc_point_in_time( +def _get_oldest_possible_ts( hass: HomeAssistant, utc_point_in_time: datetime ) -> float | None: - """Return the start time of a run.""" - run = get_instance(hass).recorder_runs_manager.get(utc_point_in_time) - if ( - run is not None - and (run_start := process_timestamp(run.start)) < utc_point_in_time - ): - return run_start.timestamp() - # History did not run before utc_point_in_time but we still + """Return the oldest possible timestamp. + + Returns None if there are no states as old as utc_point_in_time. 
+ """ + + oldest_ts = get_instance(hass).states_manager.oldest_ts + if oldest_ts is not None and oldest_ts < utc_point_in_time.timestamp(): + return oldest_ts return None diff --git a/homeassistant/components/recorder/purge.py b/homeassistant/components/recorder/purge.py index eb67300e8d4..11f5accc978 100644 --- a/homeassistant/components/recorder/purge.py +++ b/homeassistant/components/recorder/purge.py @@ -122,6 +122,9 @@ def purge_old_data( _purge_old_entity_ids(instance, session) _purge_old_recorder_runs(instance, session, purge_before) + with session_scope(session=instance.get_session(), read_only=True) as session: + instance.recorder_runs_manager.load_from_db(session) + instance.states_manager.load_from_db(session) if repack: repack_database(instance) return True diff --git a/homeassistant/components/recorder/queries.py b/homeassistant/components/recorder/queries.py index 2e4b588a0b0..8ca7bef2691 100644 --- a/homeassistant/components/recorder/queries.py +++ b/homeassistant/components/recorder/queries.py @@ -637,6 +637,15 @@ def find_states_to_purge( ) +def find_oldest_state() -> StatementLambdaElement: + """Find the last_updated_ts of the oldest state.""" + return lambda_stmt( + lambda: select(States.last_updated_ts).where( + States.state_id.in_(select(func.min(States.state_id))) + ) + ) + + def find_short_term_statistics_to_purge( purge_before: datetime, max_bind_vars: int ) -> StatementLambdaElement: diff --git a/homeassistant/components/recorder/table_managers/states.py b/homeassistant/components/recorder/table_managers/states.py index d5cef759c54..fafcfa0ea61 100644 --- a/homeassistant/components/recorder/table_managers/states.py +++ b/homeassistant/components/recorder/table_managers/states.py @@ -2,7 +2,15 @@ from __future__ import annotations +from collections.abc import Sequence +from typing import Any, cast + +from sqlalchemy.engine.row import Row +from sqlalchemy.orm.session import Session + from ..db_schema import States +from ..queries import 
find_oldest_state +from ..util import execute_stmt_lambda_element class StatesManager: @@ -13,6 +21,12 @@ class StatesManager: self._pending: dict[str, States] = {} self._last_committed_id: dict[str, int] = {} self._last_reported: dict[int, float] = {} + self._oldest_ts: float | None = None + + @property + def oldest_ts(self) -> float | None: + """Return the oldest timestamp.""" + return self._oldest_ts def pop_pending(self, entity_id: str) -> States | None: """Pop a pending state. @@ -44,6 +58,8 @@ class StatesManager: recorder thread. """ self._pending[entity_id] = state + if self._oldest_ts is None: + self._oldest_ts = state.last_updated_ts def update_pending_last_reported( self, state_id: int, last_reported_timestamp: float @@ -74,6 +90,22 @@ class StatesManager: """ self._last_committed_id.clear() self._pending.clear() + self._oldest_ts = None + + def load_from_db(self, session: Session) -> None: + """Update the cache. + + Must run in the recorder thread. + """ + result = cast( + Sequence[Row[Any]], + execute_stmt_lambda_element(session, find_oldest_state()), + ) + if not result: + ts = None + else: + ts = result[0].last_updated_ts + self._oldest_ts = ts def evict_purged_state_ids(self, purged_state_ids: set[int]) -> None: """Evict purged states from the committed states. diff --git a/homeassistant/components/recorder/tasks.py b/homeassistant/components/recorder/tasks.py index 783f0a80b8e..fa10c12aa68 100644 --- a/homeassistant/components/recorder/tasks.py +++ b/homeassistant/components/recorder/tasks.py @@ -120,8 +120,6 @@ class PurgeTask(RecorderTask): if purge.purge_old_data( instance, self.purge_before, self.repack, self.apply_filter ): - with instance.get_session() as session: - instance.recorder_runs_manager.load_from_db(session) # We always need to do the db cleanups after a purge # is finished to ensure the WAL checkpoint and other # tasks happen after a vacuum. 
diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index ea764b14401..c3ff5027b70 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -112,6 +112,9 @@ async def test_purge_big_database(hass: HomeAssistant, recorder_mock: Recorder) async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test deleting old states.""" + assert recorder_mock.states_manager.oldest_ts is None + oldest_ts = recorder_mock.states_manager.oldest_ts + await _add_test_states(hass) # make sure we start with 6 states @@ -127,6 +130,10 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> events = session.query(Events).filter(Events.event_type == "state_changed") assert events.count() == 0 + assert recorder_mock.states_manager.oldest_ts != oldest_ts + assert recorder_mock.states_manager.oldest_ts == states[0].last_updated_ts + oldest_ts = recorder_mock.states_manager.oldest_ts + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id purge_before = dt_util.utcnow() - timedelta(days=4) @@ -140,6 +147,8 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> repack=False, ) assert not finished + # states_manager.oldest_ts is not updated until after the purge is complete + assert recorder_mock.states_manager.oldest_ts == oldest_ts with session_scope(hass=hass) as session: states = session.query(States) @@ -162,6 +171,8 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> finished = purge_old_data(recorder_mock, purge_before, repack=False) assert finished + # states_manager.oldest_ts should now be updated + assert recorder_mock.states_manager.oldest_ts != oldest_ts with session_scope(hass=hass) as session: states = session.query(States) @@ -169,6 +180,10 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> assert states.count() == 2 assert 
state_attributes.count() == 1 + assert recorder_mock.states_manager.oldest_ts != oldest_ts + assert recorder_mock.states_manager.oldest_ts == states[0].last_updated_ts + oldest_ts = recorder_mock.states_manager.oldest_ts + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id # run purge_old_data again @@ -181,6 +196,8 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> repack=False, ) assert not finished + # states_manager.oldest_ts is not updated until after the purge is complete + assert recorder_mock.states_manager.oldest_ts == oldest_ts with session_scope(hass=hass) as session: assert states.count() == 0 From dd215b3d5d165c4ad76ef31947998001b4a54b65 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 19 Dec 2024 12:32:15 +0100 Subject: [PATCH 472/677] Revert "Revert "Simplify recorder RecorderRunsManager (#131785)"" (#133564) Revert "Revert "Simplify recorder RecorderRunsManager" (#133201)" This reverts commit 980b8a91e62c449fab558318573fa756818875a6. 
--- .../recorder/table_managers/recorder_runs.py | 73 +++---------------- .../table_managers/test_recorder_runs.py | 32 ++------ 2 files changed, 15 insertions(+), 90 deletions(-) diff --git a/homeassistant/components/recorder/table_managers/recorder_runs.py b/homeassistant/components/recorder/table_managers/recorder_runs.py index b0b9818118b..4ca0aa18b88 100644 --- a/homeassistant/components/recorder/table_managers/recorder_runs.py +++ b/homeassistant/components/recorder/table_managers/recorder_runs.py @@ -2,8 +2,6 @@ from __future__ import annotations -import bisect -from dataclasses import dataclass from datetime import datetime from sqlalchemy.orm.session import Session @@ -11,34 +9,6 @@ from sqlalchemy.orm.session import Session import homeassistant.util.dt as dt_util from ..db_schema import RecorderRuns -from ..models import process_timestamp - - -def _find_recorder_run_for_start_time( - run_history: _RecorderRunsHistory, start: datetime -) -> RecorderRuns | None: - """Find the recorder run for a start time in _RecorderRunsHistory.""" - run_timestamps = run_history.run_timestamps - runs_by_timestamp = run_history.runs_by_timestamp - - # bisect_left tells us were we would insert - # a value in the list of runs after the start timestamp. 
- # - # The run before that (idx-1) is when the run started - # - # If idx is 0, history never ran before the start timestamp - # - if idx := bisect.bisect_left(run_timestamps, start.timestamp()): - return runs_by_timestamp[run_timestamps[idx - 1]] - return None - - -@dataclass(frozen=True) -class _RecorderRunsHistory: - """Bisectable history of RecorderRuns.""" - - run_timestamps: list[int] - runs_by_timestamp: dict[int, RecorderRuns] class RecorderRunsManager: @@ -48,7 +18,7 @@ class RecorderRunsManager: """Track recorder run history.""" self._recording_start = dt_util.utcnow() self._current_run_info: RecorderRuns | None = None - self._run_history = _RecorderRunsHistory([], {}) + self._first_run: RecorderRuns | None = None @property def recording_start(self) -> datetime: @@ -58,9 +28,7 @@ class RecorderRunsManager: @property def first(self) -> RecorderRuns: """Get the first run.""" - if runs_by_timestamp := self._run_history.runs_by_timestamp: - return next(iter(runs_by_timestamp.values())) - return self.current + return self._first_run or self.current @property def current(self) -> RecorderRuns: @@ -78,15 +46,6 @@ class RecorderRunsManager: """Return if a run is active.""" return self._current_run_info is not None - def get(self, start: datetime) -> RecorderRuns | None: - """Return the recorder run that started before or at start. - - If the first run started after the start, return None - """ - if start >= self.recording_start: - return self.current - return _find_recorder_run_for_start_time(self._run_history, start) - def start(self, session: Session) -> None: """Start a new run. @@ -122,31 +81,17 @@ class RecorderRunsManager: Must run in the recorder thread. 
""" - run_timestamps: list[int] = [] - runs_by_timestamp: dict[int, RecorderRuns] = {} - - for run in session.query(RecorderRuns).order_by(RecorderRuns.start.asc()).all(): + if ( + run := session.query(RecorderRuns) + .order_by(RecorderRuns.start.asc()) + .first() + ): session.expunge(run) - if run_dt := process_timestamp(run.start): - # Not sure if this is correct or runs_by_timestamp annotation should be changed - timestamp = int(run_dt.timestamp()) - run_timestamps.append(timestamp) - runs_by_timestamp[timestamp] = run - - # - # self._run_history is accessed in get() - # which is allowed to be called from any thread - # - # We use a dataclass to ensure that when we update - # run_timestamps and runs_by_timestamp - # are never out of sync with each other. - # - self._run_history = _RecorderRunsHistory(run_timestamps, runs_by_timestamp) + self._first_run = run def clear(self) -> None: """Clear the current run after ending it. Must run in the recorder thread. """ - if self._current_run_info: - self._current_run_info = None + self._current_run_info = None diff --git a/tests/components/recorder/table_managers/test_recorder_runs.py b/tests/components/recorder/table_managers/test_recorder_runs.py index 41f3a8fef4d..e79def01bad 100644 --- a/tests/components/recorder/table_managers/test_recorder_runs.py +++ b/tests/components/recorder/table_managers/test_recorder_runs.py @@ -21,6 +21,11 @@ async def test_run_history(recorder_mock: Recorder, hass: HomeAssistant) -> None two_days_ago = now - timedelta(days=2) one_day_ago = now - timedelta(days=1) + # Test that the first run falls back to the current run + assert process_timestamp( + instance.recorder_runs_manager.first.start + ) == process_timestamp(instance.recorder_runs_manager.current.start) + with instance.get_session() as session: session.add(RecorderRuns(start=three_days_ago, created=three_days_ago)) session.add(RecorderRuns(start=two_days_ago, created=two_days_ago)) @@ -29,32 +34,7 @@ async def 
test_run_history(recorder_mock: Recorder, hass: HomeAssistant) -> None instance.recorder_runs_manager.load_from_db(session) assert ( - process_timestamp( - instance.recorder_runs_manager.get( - three_days_ago + timedelta(microseconds=1) - ).start - ) - == three_days_ago - ) - assert ( - process_timestamp( - instance.recorder_runs_manager.get( - two_days_ago + timedelta(microseconds=1) - ).start - ) - == two_days_ago - ) - assert ( - process_timestamp( - instance.recorder_runs_manager.get( - one_day_ago + timedelta(microseconds=1) - ).start - ) - == one_day_ago - ) - assert ( - process_timestamp(instance.recorder_runs_manager.get(now).start) - == instance.recorder_runs_manager.recording_start + process_timestamp(instance.recorder_runs_manager.first.start) == three_days_ago ) From 962f1bad32ea47ba9454aebd37eb7c4e4f307900 Mon Sep 17 00:00:00 2001 From: Stefan Agner Date: Thu, 19 Dec 2024 12:40:05 +0100 Subject: [PATCH 473/677] Add mW as unit of measurement for Matter electrical power sensors (#133504) --- homeassistant/components/matter/sensor.py | 4 ++-- homeassistant/components/number/const.py | 2 +- homeassistant/components/sensor/const.py | 2 +- homeassistant/const.py | 1 + homeassistant/util/unit_conversion.py | 2 ++ .../matter/snapshots/test_sensor.ambr | 6 ++++++ tests/components/sensor/test_recorder.py | 20 +++++++++++++++---- tests/util/test_unit_conversion.py | 1 + 8 files changed, 30 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/matter/sensor.py b/homeassistant/components/matter/sensor.py index de4fdfe2685..847c9439b81 100644 --- a/homeassistant/components/matter/sensor.py +++ b/homeassistant/components/matter/sensor.py @@ -580,10 +580,10 @@ DISCOVERY_SCHEMAS = [ key="ElectricalPowerMeasurementWatt", device_class=SensorDeviceClass.POWER, entity_category=EntityCategory.DIAGNOSTIC, - native_unit_of_measurement=UnitOfPower.WATT, + native_unit_of_measurement=UnitOfPower.MILLIWATT, + suggested_unit_of_measurement=UnitOfPower.WATT, 
suggested_display_precision=2, state_class=SensorStateClass.MEASUREMENT, - measurement_to_ha=lambda x: x / 1000, ), entity_class=MatterSensor, required_attributes=( diff --git a/homeassistant/components/number/const.py b/homeassistant/components/number/const.py index 56466934e5f..91a9d6adfe4 100644 --- a/homeassistant/components/number/const.py +++ b/homeassistant/components/number/const.py @@ -280,7 +280,7 @@ class NumberDeviceClass(StrEnum): POWER = "power" """Power. - Unit of measurement: `W`, `kW`, `MW`, `GW`, `TW` + Unit of measurement: `mW`, `W`, `kW`, `MW`, `GW`, `TW`, `BTU/h` """ PRECIPITATION = "precipitation" diff --git a/homeassistant/components/sensor/const.py b/homeassistant/components/sensor/const.py index 2fb563051a9..8c3c3925513 100644 --- a/homeassistant/components/sensor/const.py +++ b/homeassistant/components/sensor/const.py @@ -308,7 +308,7 @@ class SensorDeviceClass(StrEnum): POWER = "power" """Power. - Unit of measurement: `W`, `kW`, `MW`, `GW`, `TW` + Unit of measurement: `mW`, `W`, `kW`, `MW`, `GW`, `TW`, `BTU/h` """ PRECIPITATION = "precipitation" diff --git a/homeassistant/const.py b/homeassistant/const.py index c026a8e5427..eed8d73a4ee 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -589,6 +589,7 @@ class UnitOfApparentPower(StrEnum): class UnitOfPower(StrEnum): """Power units.""" + MILLIWATT = "mW" WATT = "W" KILO_WATT = "kW" MEGA_WATT = "MW" diff --git a/homeassistant/util/unit_conversion.py b/homeassistant/util/unit_conversion.py index 8bf6d4b9fc9..8ea290f01d1 100644 --- a/homeassistant/util/unit_conversion.py +++ b/homeassistant/util/unit_conversion.py @@ -340,6 +340,7 @@ class PowerConverter(BaseUnitConverter): UNIT_CLASS = "power" _UNIT_CONVERSION: dict[str | None, float] = { + UnitOfPower.MILLIWATT: 1 * 1000, UnitOfPower.WATT: 1, UnitOfPower.KILO_WATT: 1 / 1000, UnitOfPower.MEGA_WATT: 1 / 1e6, @@ -347,6 +348,7 @@ class PowerConverter(BaseUnitConverter): UnitOfPower.TERA_WATT: 1 / 1e12, } VALID_UNITS = { + 
UnitOfPower.MILLIWATT, UnitOfPower.WATT, UnitOfPower.KILO_WATT, UnitOfPower.MEGA_WATT, diff --git a/tests/components/matter/snapshots/test_sensor.ambr b/tests/components/matter/snapshots/test_sensor.ambr index e452ce45f1d..f88604e7d46 100644 --- a/tests/components/matter/snapshots/test_sensor.ambr +++ b/tests/components/matter/snapshots/test_sensor.ambr @@ -1744,6 +1744,9 @@ 'sensor': dict({ 'suggested_display_precision': 2, }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -2850,6 +2853,9 @@ 'sensor': dict({ 'suggested_display_precision': 2, }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 44eaa9fde0d..636fb9871c9 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -4247,8 +4247,14 @@ async def async_record_states( @pytest.mark.parametrize( ("units", "attributes", "unit", "unit2", "supported_unit"), [ - (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), - (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), + ( + US_CUSTOMARY_SYSTEM, + POWER_SENSOR_ATTRIBUTES, + "W", + "kW", + "GW, MW, TW, W, kW, mW", + ), + (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW, mW"), ( US_CUSTOMARY_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, @@ -4459,8 +4465,14 @@ async def test_validate_statistics_unit_ignore_device_class( @pytest.mark.parametrize( ("units", "attributes", "unit", "unit2", "supported_unit"), [ - (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), - (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), + ( + US_CUSTOMARY_SYSTEM, + POWER_SENSOR_ATTRIBUTES, + "W", + "kW", + "GW, MW, TW, W, kW, mW", + ), + (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, 
"W", "kW", "GW, MW, TW, W, kW, mW"), ( US_CUSTOMARY_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, diff --git a/tests/util/test_unit_conversion.py b/tests/util/test_unit_conversion.py index 4be32b2851e..9c123d93f62 100644 --- a/tests/util/test_unit_conversion.py +++ b/tests/util/test_unit_conversion.py @@ -537,6 +537,7 @@ _CONVERTED_VALUE: dict[ (10, UnitOfPower.GIGA_WATT, 10e9, UnitOfPower.WATT), (10, UnitOfPower.TERA_WATT, 10e12, UnitOfPower.WATT), (10, UnitOfPower.WATT, 0.01, UnitOfPower.KILO_WATT), + (10, UnitOfPower.MILLIWATT, 0.01, UnitOfPower.WATT), ], PressureConverter: [ (1000, UnitOfPressure.HPA, 14.5037743897, UnitOfPressure.PSI), From eb8ee1339cad568253ba408b3b0e3d4c6167b4da Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Thu, 19 Dec 2024 06:40:23 -0500 Subject: [PATCH 474/677] Set Russound RIO quality scale to silver (#133494) --- homeassistant/components/russound_rio/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/russound_rio/manifest.json b/homeassistant/components/russound_rio/manifest.json index 2cd153c232c..ab77ca3ab6a 100644 --- a/homeassistant/components/russound_rio/manifest.json +++ b/homeassistant/components/russound_rio/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/russound_rio", "iot_class": "local_push", "loggers": ["aiorussound"], + "quality_scale": "silver", "requirements": ["aiorussound==4.1.0"] } From 94c7d1834620ea48a99fa04920429db51eeca13a Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Thu, 19 Dec 2024 13:36:32 +0100 Subject: [PATCH 475/677] Bump pylamarzocco to 1.4.1 (#133557) --- homeassistant/components/lamarzocco/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index 7505843850c..b34df6d6917 100644 --- 
a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -37,5 +37,5 @@ "iot_class": "cloud_polling", "loggers": ["pylamarzocco"], "quality_scale": "platinum", - "requirements": ["pylamarzocco==1.4.0"] + "requirements": ["pylamarzocco==1.4.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index d8dc08ca301..90bb9e9b2ab 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2043,7 +2043,7 @@ pykwb==0.0.8 pylacrosse==0.4 # homeassistant.components.lamarzocco -pylamarzocco==1.4.0 +pylamarzocco==1.4.1 # homeassistant.components.lastfm pylast==5.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a988c0836b8..7bdedce08c9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1654,7 +1654,7 @@ pykrakenapi==0.1.8 pykulersky==0.5.2 # homeassistant.components.lamarzocco -pylamarzocco==1.4.0 +pylamarzocco==1.4.1 # homeassistant.components.lastfm pylast==5.1.0 From 255f85eb2ff25ce1e1dd168b8963817b4fd6b6f1 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 19 Dec 2024 16:04:59 +0100 Subject: [PATCH 476/677] Fix boot loop after restoring backup (#133581) --- homeassistant/backup_restore.py | 3 +++ tests/test_backup_restore.py | 9 ++++++--- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/homeassistant/backup_restore.py b/homeassistant/backup_restore.py index f9250e3129e..57e1c734dfc 100644 --- a/homeassistant/backup_restore.py +++ b/homeassistant/backup_restore.py @@ -64,6 +64,9 @@ def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | ) except (FileNotFoundError, KeyError, json.JSONDecodeError): return None + finally: + # Always remove the backup instruction file to prevent a boot loop + instruction_path.unlink(missing_ok=True) def _clear_configuration_directory(config_dir: Path, keep: Iterable[str]) -> None: diff --git a/tests/test_backup_restore.py b/tests/test_backup_restore.py index bce5eca4292..10ea64a6a61 
100644 --- a/tests/test_backup_restore.py +++ b/tests/test_backup_restore.py @@ -57,11 +57,14 @@ def test_reading_the_instruction_contents( return_value=content, side_effect=side_effect, ), + mock.patch("pathlib.Path.unlink", autospec=True) as unlink_mock, ): - read_content = backup_restore.restore_backup_file_content( - Path(get_test_config_dir()) - ) + config_path = Path(get_test_config_dir()) + read_content = backup_restore.restore_backup_file_content(config_path) assert read_content == expected + unlink_mock.assert_called_once_with( + config_path / ".HA_RESTORE", missing_ok=True + ) def test_restoring_backup_that_does_not_exist() -> None: From a3ef3cce3e5cff6330705d0d6ba5fe6d7004aa3b Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Thu, 19 Dec 2024 07:41:47 -0800 Subject: [PATCH 477/677] Improve Google Tasks coordinator updates behavior (#133316) --- .../components/google_tasks/__init__.py | 23 +++++++- .../components/google_tasks/coordinator.py | 11 ++-- homeassistant/components/google_tasks/todo.py | 14 ++--- .../components/google_tasks/types.py | 16 +----- tests/components/google_tasks/conftest.py | 14 ++++- tests/components/google_tasks/test_init.py | 35 +++++++++---- tests/components/google_tasks/test_todo.py | 52 ++++++++++++------- 7 files changed, 107 insertions(+), 58 deletions(-) diff --git a/homeassistant/components/google_tasks/__init__.py b/homeassistant/components/google_tasks/__init__.py index 2ff22068ca9..45ad1777aa0 100644 --- a/homeassistant/components/google_tasks/__init__.py +++ b/homeassistant/components/google_tasks/__init__.py @@ -2,6 +2,8 @@ from __future__ import annotations +import asyncio + from aiohttp import ClientError, ClientResponseError from homeassistant.const import Platform @@ -11,8 +13,9 @@ from homeassistant.helpers import config_entry_oauth2_flow from . 
import api from .const import DOMAIN +from .coordinator import TaskUpdateCoordinator from .exceptions import GoogleTasksApiError -from .types import GoogleTasksConfigEntry, GoogleTasksData +from .types import GoogleTasksConfigEntry __all__ = [ "DOMAIN", @@ -46,7 +49,23 @@ async def async_setup_entry(hass: HomeAssistant, entry: GoogleTasksConfigEntry) except GoogleTasksApiError as err: raise ConfigEntryNotReady from err - entry.runtime_data = GoogleTasksData(auth, task_lists) + coordinators = [ + TaskUpdateCoordinator( + hass, + auth, + task_list["id"], + task_list["title"], + ) + for task_list in task_lists + ] + # Refresh all coordinators in parallel + await asyncio.gather( + *( + coordinator.async_config_entry_first_refresh() + for coordinator in coordinators + ) + ) + entry.runtime_data = coordinators await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/google_tasks/coordinator.py b/homeassistant/components/google_tasks/coordinator.py index 5377e2be567..a06faf00a91 100644 --- a/homeassistant/components/google_tasks/coordinator.py +++ b/homeassistant/components/google_tasks/coordinator.py @@ -20,7 +20,11 @@ class TaskUpdateCoordinator(DataUpdateCoordinator[list[dict[str, Any]]]): """Coordinator for fetching Google Tasks for a Task List form the API.""" def __init__( - self, hass: HomeAssistant, api: AsyncConfigEntryAuth, task_list_id: str + self, + hass: HomeAssistant, + api: AsyncConfigEntryAuth, + task_list_id: str, + task_list_title: str, ) -> None: """Initialize TaskUpdateCoordinator.""" super().__init__( @@ -30,9 +34,10 @@ class TaskUpdateCoordinator(DataUpdateCoordinator[list[dict[str, Any]]]): update_interval=UPDATE_INTERVAL, ) self.api = api - self._task_list_id = task_list_id + self.task_list_id = task_list_id + self.task_list_title = task_list_title async def _async_update_data(self) -> list[dict[str, Any]]: """Fetch tasks from API endpoint.""" async with asyncio.timeout(TIMEOUT): - return await 
self.api.list_tasks(self._task_list_id) + return await self.api.list_tasks(self.task_list_id) diff --git a/homeassistant/components/google_tasks/todo.py b/homeassistant/components/google_tasks/todo.py index 9a44b91b529..1df5e5fc2e9 100644 --- a/homeassistant/components/google_tasks/todo.py +++ b/homeassistant/components/google_tasks/todo.py @@ -2,7 +2,7 @@ from __future__ import annotations -from datetime import UTC, date, datetime, timedelta +from datetime import UTC, date, datetime from typing import Any, cast from homeassistant.components.todo import ( @@ -20,7 +20,6 @@ from .coordinator import TaskUpdateCoordinator from .types import GoogleTasksConfigEntry PARALLEL_UPDATES = 0 -SCAN_INTERVAL = timedelta(minutes=15) TODO_STATUS_MAP = { "needsAction": TodoItemStatus.NEEDS_ACTION, @@ -76,14 +75,13 @@ async def async_setup_entry( async_add_entities( ( GoogleTaskTodoListEntity( - TaskUpdateCoordinator(hass, entry.runtime_data.api, task_list["id"]), - task_list["title"], + coordinator, + coordinator.task_list_title, entry.entry_id, - task_list["id"], + coordinator.task_list_id, ) - for task_list in entry.runtime_data.task_lists + for coordinator in entry.runtime_data ), - True, ) @@ -118,8 +116,6 @@ class GoogleTaskTodoListEntity( @property def todo_items(self) -> list[TodoItem] | None: """Get the current set of To-do items.""" - if self.coordinator.data is None: - return None return [_convert_api_item(item) for item in _order_tasks(self.coordinator.data)] async def async_create_todo_item(self, item: TodoItem) -> None: diff --git a/homeassistant/components/google_tasks/types.py b/homeassistant/components/google_tasks/types.py index eaaec23ddf5..21500d11eb8 100644 --- a/homeassistant/components/google_tasks/types.py +++ b/homeassistant/components/google_tasks/types.py @@ -1,19 +1,7 @@ """Types for the Google Tasks integration.""" -from dataclasses import dataclass -from typing import Any - from homeassistant.config_entries import ConfigEntry -from .api import 
AsyncConfigEntryAuth +from .coordinator import TaskUpdateCoordinator - -@dataclass -class GoogleTasksData: - """Class to hold Google Tasks data.""" - - api: AsyncConfigEntryAuth - task_lists: list[dict[str, Any]] - - -type GoogleTasksConfigEntry = ConfigEntry[GoogleTasksData] +type GoogleTasksConfigEntry = ConfigEntry[list[TaskUpdateCoordinator]] diff --git a/tests/components/google_tasks/conftest.py b/tests/components/google_tasks/conftest.py index e519cac9bdc..8f966800147 100644 --- a/tests/components/google_tasks/conftest.py +++ b/tests/components/google_tasks/conftest.py @@ -34,6 +34,18 @@ LIST_TASK_LIST_RESPONSE = { "items": [TASK_LIST], } +LIST_TASKS_RESPONSE_WATER = { + "items": [ + { + "id": "some-task-id", + "title": "Water", + "status": "needsAction", + "description": "Any size is ok", + "position": "00000000000000000001", + }, + ], +} + @pytest.fixture def platforms() -> list[Platform]: @@ -44,7 +56,7 @@ def platforms() -> list[Platform]: @pytest.fixture(name="expires_at") def mock_expires_at() -> int: """Fixture to set the oauth token expiration time.""" - return time.time() + 3600 + return time.time() + 86400 @pytest.fixture(name="token_entry") diff --git a/tests/components/google_tasks/test_init.py b/tests/components/google_tasks/test_init.py index 9ad8c887a66..e93e0d9c643 100644 --- a/tests/components/google_tasks/test_init.py +++ b/tests/components/google_tasks/test_init.py @@ -3,6 +3,7 @@ from collections.abc import Awaitable, Callable import http from http import HTTPStatus +import json import time from unittest.mock import Mock @@ -15,13 +16,15 @@ from homeassistant.components.google_tasks.const import OAUTH2_TOKEN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from .conftest import LIST_TASK_LIST_RESPONSE +from .conftest import LIST_TASK_LIST_RESPONSE, LIST_TASKS_RESPONSE_WATER from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker 
-@pytest.mark.parametrize("api_responses", [[LIST_TASK_LIST_RESPONSE]]) +@pytest.mark.parametrize( + "api_responses", [[LIST_TASK_LIST_RESPONSE, LIST_TASKS_RESPONSE_WATER]] +) async def test_setup( hass: HomeAssistant, integration_setup: Callable[[], Awaitable[bool]], @@ -42,8 +45,10 @@ async def test_setup( assert not hass.services.async_services().get(DOMAIN) -@pytest.mark.parametrize("expires_at", [time.time() - 3600], ids=["expired"]) -@pytest.mark.parametrize("api_responses", [[LIST_TASK_LIST_RESPONSE]]) +@pytest.mark.parametrize("expires_at", [time.time() - 86400], ids=["expired"]) +@pytest.mark.parametrize( + "api_responses", [[LIST_TASK_LIST_RESPONSE, LIST_TASKS_RESPONSE_WATER]] +) async def test_expired_token_refresh_success( hass: HomeAssistant, integration_setup: Callable[[], Awaitable[bool]], @@ -60,8 +65,8 @@ async def test_expired_token_refresh_success( json={ "access_token": "updated-access-token", "refresh_token": "updated-refresh-token", - "expires_at": time.time() + 3600, - "expires_in": 3600, + "expires_at": time.time() + 86400, + "expires_in": 86400, }, ) @@ -69,26 +74,26 @@ async def test_expired_token_refresh_success( assert config_entry.state is ConfigEntryState.LOADED assert config_entry.data["token"]["access_token"] == "updated-access-token" - assert config_entry.data["token"]["expires_in"] == 3600 + assert config_entry.data["token"]["expires_in"] == 86400 @pytest.mark.parametrize( ("expires_at", "status", "exc", "expected_state"), [ ( - time.time() - 3600, + time.time() - 86400, http.HTTPStatus.UNAUTHORIZED, None, ConfigEntryState.SETUP_ERROR, ), ( - time.time() - 3600, + time.time() - 86400, http.HTTPStatus.INTERNAL_SERVER_ERROR, None, ConfigEntryState.SETUP_RETRY, ), ( - time.time() - 3600, + time.time() - 86400, None, ClientError("error"), ConfigEntryState.SETUP_RETRY, @@ -124,6 +129,16 @@ async def test_expired_token_refresh_failure( "response_handler", [ ([(Response({"status": HTTPStatus.INTERNAL_SERVER_ERROR}), b"")]), + # First 
request succeeds, second request fails + ( + [ + ( + Response({"status": HTTPStatus.OK}), + json.dumps(LIST_TASK_LIST_RESPONSE), + ), + (Response({"status": HTTPStatus.INTERNAL_SERVER_ERROR}), b""), + ] + ), ], ) async def test_setup_error( diff --git a/tests/components/google_tasks/test_todo.py b/tests/components/google_tasks/test_todo.py index c713b9fd44f..f28f1bb917e 100644 --- a/tests/components/google_tasks/test_todo.py +++ b/tests/components/google_tasks/test_todo.py @@ -6,10 +6,12 @@ import json from typing import Any from unittest.mock import Mock +from freezegun.api import FrozenDateTimeFactory from httplib2 import Response import pytest from syrupy.assertion import SnapshotAssertion +from homeassistant.components.google_tasks.coordinator import UPDATE_INTERVAL from homeassistant.components.todo import ( ATTR_DESCRIPTION, ATTR_DUE_DATE, @@ -19,12 +21,17 @@ from homeassistant.components.todo import ( DOMAIN as TODO_DOMAIN, TodoServices, ) -from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from .conftest import LIST_TASK_LIST_RESPONSE, create_response_object +from .conftest import ( + LIST_TASK_LIST_RESPONSE, + LIST_TASKS_RESPONSE_WATER, + create_response_object, +) +from tests.common import async_fire_time_changed from tests.typing import WebSocketGenerator ENTITY_ID = "todo.my_tasks" @@ -44,17 +51,6 @@ ERROR_RESPONSE = { CONTENT_ID = "Content-ID" BOUNDARY = "batch_00972cc8-75bd-11ee-9692-0242ac110002" # Arbitrary uuid -LIST_TASKS_RESPONSE_WATER = { - "items": [ - { - "id": "some-task-id", - "title": "Water", - "status": "needsAction", - "description": "Any size is ok", - "position": "00000000000000000001", - }, - ], -} LIST_TASKS_RESPONSE_MULTIPLE = { "items": [ { @@ -311,7 +307,9 @@ async def test_empty_todo_list( [ [ LIST_TASK_LIST_RESPONSE, - ERROR_RESPONSE, + 
LIST_TASKS_RESPONSE_WATER, + ERROR_RESPONSE, # Fail after one update interval + LIST_TASKS_RESPONSE_WATER, ] ], ) @@ -319,18 +317,34 @@ async def test_task_items_error_response( hass: HomeAssistant, setup_credentials: None, integration_setup: Callable[[], Awaitable[bool]], - hass_ws_client: WebSocketGenerator, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], + freezer: FrozenDateTimeFactory, ) -> None: - """Test an error while getting todo list items.""" + """Test an error while the entity updates getting a new list of todo list items.""" assert await integration_setup() - await hass_ws_client(hass) + # Test successful setup and first data fetch + state = hass.states.get("todo.my_tasks") + assert state + assert state.state == "1" + + # Next update fails + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("todo.my_tasks") assert state - assert state.state == "unavailable" + assert state.state == STATE_UNAVAILABLE + + # Next update succeeds + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("todo.my_tasks") + assert state + assert state.state == "1" @pytest.mark.parametrize( From 95b3d27b6073e1cac9015185da873fbf9c28e471 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 19 Dec 2024 18:23:40 +0100 Subject: [PATCH 478/677] Update Airgradient quality scale (#133569) --- .../components/airgradient/quality_scale.yaml | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/airgradient/quality_scale.yaml b/homeassistant/components/airgradient/quality_scale.yaml index 8d62e8515fc..71132fdb47a 100644 --- a/homeassistant/components/airgradient/quality_scale.yaml +++ b/homeassistant/components/airgradient/quality_scale.yaml @@ -7,7 +7,9 @@ rules: appropriate-polling: done brands: done common-modules: done - 
config-flow-test-coverage: done + config-flow-test-coverage: + status: todo + comment: Missing zeroconf duplicate entry test. config-flow: done dependency-transparency: done docs-actions: @@ -31,7 +33,9 @@ rules: # Silver action-exceptions: todo config-entry-unloading: done - docs-configuration-parameters: todo + docs-configuration-parameters: + status: exempt + comment: No options to configure docs-installation-parameters: todo entity-unavailable: done integration-owner: done @@ -41,12 +45,16 @@ rules: status: exempt comment: | This integration does not require authentication. - test-coverage: done + test-coverage: todo # Gold devices: done diagnostics: done - discovery-update-info: done - discovery: done + discovery-update-info: + status: todo + comment: DHCP is still possible + discovery: + status: todo + comment: DHCP is still possible docs-data-update: todo docs-examples: todo docs-known-limitations: todo From 1a068d99d62a5da299aeb73cc027cbd446872359 Mon Sep 17 00:00:00 2001 From: Andrew Jackson Date: Thu, 19 Dec 2024 17:28:50 +0000 Subject: [PATCH 479/677] Add data descriptions to Mealie integration (#133590) --- homeassistant/components/mealie/strings.json | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/mealie/strings.json b/homeassistant/components/mealie/strings.json index 830d43d8f93..de91c507950 100644 --- a/homeassistant/components/mealie/strings.json +++ b/homeassistant/components/mealie/strings.json @@ -1,4 +1,9 @@ { + "common": { + "data_description_host": "The URL of your Mealie instance, for example, http://192.168.1.123:1234", + "data_description_api_token": "The API token of your Mealie instance from your user profile within Mealie.", + "data_description_verify_ssl": "Should SSL certificates be verified? This should be off for self-signed certificates." 
+ }, "config": { "step": { "user": { @@ -8,13 +13,18 @@ "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" }, "data_description": { - "host": "The URL of your Mealie instance, for example, http://192.168.1.123:1234" + "host": "[%key:component::mealie::common::data_description_host%]", + "api_token": "[%key:component::mealie::common::data_description_api_token%]", + "verify_ssl": "[%key:component::mealie::common::data_description_verify_ssl%]" } }, "reauth_confirm": { "description": "Please reauthenticate with Mealie.", "data": { "api_token": "[%key:common::config_flow::data::api_token%]" + }, + "data_description": { + "api_token": "[%key:component::mealie::common::data_description_api_token%]" } }, "reconfigure": { @@ -23,6 +33,11 @@ "host": "[%key:common::config_flow::data::url%]", "api_token": "[%key:common::config_flow::data::api_token%]", "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" + }, + "data_description": { + "host": "[%key:component::mealie::common::data_description_host%]", + "api_token": "[%key:component::mealie::common::data_description_api_token%]", + "verify_ssl": "[%key:component::mealie::common::data_description_verify_ssl%]" } } }, From e357e0a406c648f957f845642e39a37ebcc68135 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 19 Dec 2024 18:40:04 +0100 Subject: [PATCH 480/677] Set default min/max color temperature in template lights (#133549) --- homeassistant/components/template/light.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/template/light.py b/homeassistant/components/template/light.py index 0654a42406a..9391e368e2b 100644 --- a/homeassistant/components/template/light.py +++ b/homeassistant/components/template/light.py @@ -16,6 +16,8 @@ from homeassistant.components.light import ( ATTR_RGBW_COLOR, ATTR_RGBWW_COLOR, ATTR_TRANSITION, + DEFAULT_MAX_KELVIN, + DEFAULT_MIN_KELVIN, ENTITY_ID_FORMAT, PLATFORM_SCHEMA as 
LIGHT_PLATFORM_SCHEMA, ColorMode, @@ -278,7 +280,7 @@ class LightTemplate(TemplateEntity, LightEntity): if self._max_mireds is not None: return color_util.color_temperature_mired_to_kelvin(self._max_mireds) - return super().min_color_temp_kelvin + return DEFAULT_MIN_KELVIN @property def max_color_temp_kelvin(self) -> int: @@ -286,7 +288,7 @@ class LightTemplate(TemplateEntity, LightEntity): if self._min_mireds is not None: return color_util.color_temperature_mired_to_kelvin(self._min_mireds) - return super().max_color_temp_kelvin + return DEFAULT_MAX_KELVIN @property def hs_color(self) -> tuple[float, float] | None: From a97434976e44b952f50b38d937936bb9d13f97cb Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Thu, 19 Dec 2024 19:00:18 +0100 Subject: [PATCH 481/677] Handle null value for elapsed time in Music Assistant (#133597) --- homeassistant/components/music_assistant/media_player.py | 8 ++------ tests/components/music_assistant/fixtures/players.json | 2 +- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/music_assistant/media_player.py b/homeassistant/components/music_assistant/media_player.py index 7d09bd5b888..7004f09aad5 100644 --- a/homeassistant/components/music_assistant/media_player.py +++ b/homeassistant/components/music_assistant/media_player.py @@ -565,17 +565,13 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity): # shuffle and repeat are not (yet) supported for external sources self._attr_shuffle = None self._attr_repeat = None - if TYPE_CHECKING: - assert player.elapsed_time is not None - self._attr_media_position = int(player.elapsed_time) + self._attr_media_position = int(player.elapsed_time or 0) self._attr_media_position_updated_at = ( utc_from_timestamp(player.elapsed_time_last_updated) if player.elapsed_time_last_updated else None ) - if TYPE_CHECKING: - assert player.elapsed_time is not None - self._prev_time = player.elapsed_time + self._prev_time = player.elapsed_time or 0 
return if queue is None: diff --git a/tests/components/music_assistant/fixtures/players.json b/tests/components/music_assistant/fixtures/players.json index 2d8b88d0e8e..8a08a55dc45 100644 --- a/tests/components/music_assistant/fixtures/players.json +++ b/tests/components/music_assistant/fixtures/players.json @@ -20,7 +20,7 @@ "power", "enqueue" ], - "elapsed_time": 0, + "elapsed_time": null, "elapsed_time_last_updated": 0, "state": "idle", "volume_level": 20, From 2f77cda822d99cfdf261d46d0cfc5ed0a1c543cf Mon Sep 17 00:00:00 2001 From: Raphael Hehl <7577984+RaHehl@users.noreply.github.com> Date: Thu, 19 Dec 2024 19:18:21 +0100 Subject: [PATCH 482/677] Add basic UniFi Protect AiPort support (#133523) * UnifiProtect add basic support for AiPort devices * Sort ignore-words --------- Co-authored-by: J. Nick Koston --- .pre-commit-config.yaml | 2 +- homeassistant/components/unifiprotect/const.py | 1 + homeassistant/components/unifiprotect/entity.py | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6ecae762dcd..a4568552780 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,7 +12,7 @@ repos: hooks: - id: codespell args: - - --ignore-words-list=astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn + - --ignore-words-list=aiport,astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn - --skip="./.*,*.csv,*.json,*.ambr" - --quiet-level=2 exclude_types: [csv, json, html] diff --git a/homeassistant/components/unifiprotect/const.py b/homeassistant/components/unifiprotect/const.py index 7d1e5b55d3f..d607f87b76a 100644 --- a/homeassistant/components/unifiprotect/const.py +++ b/homeassistant/components/unifiprotect/const.py @@ -41,6 +41,7 @@ DEFAULT_VERIFY_SSL = False DEFAULT_MAX_MEDIA = 1000 DEVICES_THAT_ADOPT = { + ModelType.AIPORT, ModelType.CAMERA, ModelType.LIGHT, ModelType.VIEWPORT, diff --git a/homeassistant/components/unifiprotect/entity.py 
b/homeassistant/components/unifiprotect/entity.py index 1d68b18f1de..335bc1e933d 100644 --- a/homeassistant/components/unifiprotect/entity.py +++ b/homeassistant/components/unifiprotect/entity.py @@ -119,6 +119,7 @@ def _async_device_entities( _ALL_MODEL_TYPES = ( + ModelType.AIPORT, ModelType.CAMERA, ModelType.LIGHT, ModelType.SENSOR, From 52683c5f75af9eab7eb5a7b35af08c6c5d0fa7e2 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 19 Dec 2024 19:58:33 +0100 Subject: [PATCH 483/677] Improve Airgradient config flow tests (#133594) --- .../components/airgradient/quality_scale.yaml | 4 +--- tests/components/airgradient/test_config_flow.py | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/airgradient/quality_scale.yaml b/homeassistant/components/airgradient/quality_scale.yaml index 71132fdb47a..43816401cdb 100644 --- a/homeassistant/components/airgradient/quality_scale.yaml +++ b/homeassistant/components/airgradient/quality_scale.yaml @@ -7,9 +7,7 @@ rules: appropriate-polling: done brands: done common-modules: done - config-flow-test-coverage: - status: todo - comment: Missing zeroconf duplicate entry test. 
+ config-flow-test-coverage: done config-flow: done dependency-transparency: done docs-actions: diff --git a/tests/components/airgradient/test_config_flow.py b/tests/components/airgradient/test_config_flow.py index 73dbd17a213..8927947c40e 100644 --- a/tests/components/airgradient/test_config_flow.py +++ b/tests/components/airgradient/test_config_flow.py @@ -255,6 +255,20 @@ async def test_zeroconf_flow_abort_old_firmware(hass: HomeAssistant) -> None: assert result["reason"] == "invalid_version" +async def test_zeroconf_flow_abort_duplicate( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test zeroconf flow aborts with duplicate.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + async def test_user_flow_works_discovery( hass: HomeAssistant, mock_new_airgradient_client: AsyncMock, From 04bcc8d3d3af8679410b4c7b9f69edac825a5d11 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 19 Dec 2024 09:13:51 -1000 Subject: [PATCH 484/677] Bump yalexs-ble to 2.5.6 (#133593) --- homeassistant/components/august/manifest.json | 2 +- homeassistant/components/yale/manifest.json | 2 +- homeassistant/components/yalexs_ble/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index d0b41411c96..652f1a7b966 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.5"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.6"] } diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json index 7b7edfac77b..f1cde31d066 100644 --- a/homeassistant/components/yale/manifest.json +++ b/homeassistant/components/yale/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale", "iot_class": "cloud_push", "loggers": ["socketio", "engineio", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.5"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.6"] } diff --git a/homeassistant/components/yalexs_ble/manifest.json b/homeassistant/components/yalexs_ble/manifest.json index b2c331397b3..15b11719fdb 100644 --- a/homeassistant/components/yalexs_ble/manifest.json +++ b/homeassistant/components/yalexs_ble/manifest.json @@ -12,5 +12,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/yalexs_ble", "iot_class": "local_push", - "requirements": ["yalexs-ble==2.5.5"] + "requirements": ["yalexs-ble==2.5.6"] } diff --git a/requirements_all.txt b/requirements_all.txt index 
90bb9e9b2ab..1f40c8d1612 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3060,7 +3060,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.5 +yalexs-ble==2.5.6 # homeassistant.components.august # homeassistant.components.yale diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7bdedce08c9..c82b937f1b0 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2455,7 +2455,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.5 +yalexs-ble==2.5.6 # homeassistant.components.august # homeassistant.components.yale From e6ef3fe5070816664969257233d178b4ad1b457e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Thu, 19 Dec 2024 19:24:10 +0000 Subject: [PATCH 485/677] Update Idasen Desk user flow step strings (#133605) --- homeassistant/components/idasen_desk/quality_scale.yaml | 5 +---- homeassistant/components/idasen_desk/strings.json | 5 ++++- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/idasen_desk/quality_scale.yaml b/homeassistant/components/idasen_desk/quality_scale.yaml index 1908178ec15..4af2f489bd3 100644 --- a/homeassistant/components/idasen_desk/quality_scale.yaml +++ b/homeassistant/components/idasen_desk/quality_scale.yaml @@ -15,10 +15,7 @@ rules: comment: | - use mock_desk_api - merge test_user_step_auth_failed, test_user_step_cannot_connect and test_user_step_unknown_exception. - config-flow: - status: todo - comment: | - Missing data description for user step. 
+ config-flow: done dependency-transparency: done docs-actions: status: exempt diff --git a/homeassistant/components/idasen_desk/strings.json b/homeassistant/components/idasen_desk/strings.json index 70e08976925..7486973638b 100644 --- a/homeassistant/components/idasen_desk/strings.json +++ b/homeassistant/components/idasen_desk/strings.json @@ -4,7 +4,10 @@ "step": { "user": { "data": { - "address": "Bluetooth address" + "address": "Device" + }, + "data_description": { + "address": "The bluetooth device for the desk." } } }, From 2413fc4c0d39b59d47ae6d593d8e928d1e23abb1 Mon Sep 17 00:00:00 2001 From: adam-the-hero <132444842+adam-the-hero@users.noreply.github.com> Date: Thu, 19 Dec 2024 20:25:24 +0100 Subject: [PATCH 486/677] Fix Watergate Water meter volume sensor (#133606) --- homeassistant/components/watergate/sensor.py | 2 +- tests/components/watergate/snapshots/test_sensor.ambr | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/watergate/sensor.py b/homeassistant/components/watergate/sensor.py index 82ac7cfea92..638bf297415 100644 --- a/homeassistant/components/watergate/sensor.py +++ b/homeassistant/components/watergate/sensor.py @@ -56,7 +56,7 @@ class WatergateSensorEntityDescription(SensorEntityDescription): DESCRIPTIONS: list[WatergateSensorEntityDescription] = [ WatergateSensorEntityDescription( value_fn=lambda data: ( - data.state.water_meter.duration + data.state.water_meter.volume if data.state and data.state.water_meter else None ), diff --git a/tests/components/watergate/snapshots/test_sensor.ambr b/tests/components/watergate/snapshots/test_sensor.ambr index a8969798105..479a879a583 100644 --- a/tests/components/watergate/snapshots/test_sensor.ambr +++ b/tests/components/watergate/snapshots/test_sensor.ambr @@ -352,7 +352,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100', + 'state': '1.2', }) # --- # name: test_sensor[sensor.sonic_water_pressure-entry] From 
61e5f10d12d184fd350ab99c4d6698654faa0069 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 19 Dec 2024 20:27:08 +0100 Subject: [PATCH 487/677] Fix Twinkly raise on progress (#133601) --- .../components/twinkly/config_flow.py | 4 +- tests/components/twinkly/test_config_flow.py | 37 +++++++++++++++++++ 2 files changed, 40 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/twinkly/config_flow.py b/homeassistant/components/twinkly/config_flow.py index 68c455dc619..837bd9ccb6a 100644 --- a/homeassistant/components/twinkly/config_flow.py +++ b/homeassistant/components/twinkly/config_flow.py @@ -45,7 +45,9 @@ class TwinklyConfigFlow(ConfigFlow, domain=DOMAIN): except (TimeoutError, ClientError): errors[CONF_HOST] = "cannot_connect" else: - await self.async_set_unique_id(device_info[DEV_ID]) + await self.async_set_unique_id( + device_info[DEV_ID], raise_on_progress=False + ) self._abort_if_unique_id_configured() return self._create_entry_from_device(device_info, host) diff --git a/tests/components/twinkly/test_config_flow.py b/tests/components/twinkly/test_config_flow.py index 9b9aeafd082..8d8e955291e 100644 --- a/tests/components/twinkly/test_config_flow.py +++ b/tests/components/twinkly/test_config_flow.py @@ -5,6 +5,7 @@ from unittest.mock import patch from homeassistant import config_entries from homeassistant.components import dhcp from homeassistant.components.twinkly.const import DOMAIN as TWINKLY_DOMAIN +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_ID, CONF_MODEL, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -157,3 +158,39 @@ async def test_dhcp_already_exists(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_user_flow_works_discovery(hass: HomeAssistant) -> None: + """Test user flow can continue after discovery 
happened.""" + client = ClientMock() + with ( + patch( + "homeassistant.components.twinkly.config_flow.Twinkly", return_value=client + ), + patch("homeassistant.components.twinkly.async_setup_entry", return_value=True), + ): + await hass.config_entries.flow.async_init( + TWINKLY_DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=dhcp.DhcpServiceInfo( + hostname="Twinkly_XYZ", + ip="1.2.3.4", + macaddress="aabbccddeeff", + ), + ) + result = await hass.config_entries.flow.async_init( + TWINKLY_DOMAIN, + context={"source": SOURCE_USER}, + ) + assert len(hass.config_entries.flow.async_progress(TWINKLY_DOMAIN)) == 2 + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "10.0.0.131"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + # Verify the discovery flow was aborted + assert not hass.config_entries.flow.async_progress(TWINKLY_DOMAIN) From b261c7f18ab7fad9ab7deb49e33440f2906305c5 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Thu, 19 Dec 2024 20:29:12 +0100 Subject: [PATCH 488/677] Mark `docs-installation-parameters` for SABnzbd as done (#133609) --- homeassistant/components/sabnzbd/quality_scale.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/sabnzbd/quality_scale.yaml b/homeassistant/components/sabnzbd/quality_scale.yaml index ef4e72b4936..a1d6fc076b2 100644 --- a/homeassistant/components/sabnzbd/quality_scale.yaml +++ b/homeassistant/components/sabnzbd/quality_scale.yaml @@ -35,7 +35,7 @@ rules: status: exempt comment: | The integration does not provide any additional options. - docs-installation-parameters: todo + docs-installation-parameters: done entity-unavailable: done integration-owner: done log-when-unavailable: done From 551a584ca69771804b6f094eceb67dcb25a2f627 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 19 Dec 2024 10:39:39 -1000 Subject: [PATCH 489/677] Handle mqtt.WebsocketConnectionError when connecting to the MQTT broker (#133610) fixes #132985 --- homeassistant/components/mqtt/client.py | 2 +- tests/components/mqtt/test_client.py | 11 +++++++++-- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index 0091d2370a4..73c6b80cb14 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -661,7 +661,7 @@ class MQTT: self.conf.get(CONF_PORT, DEFAULT_PORT), self.conf.get(CONF_KEEPALIVE, DEFAULT_KEEPALIVE), ) - except OSError as err: + except (OSError, mqtt.WebsocketConnectionError) as err: _LOGGER.error("Failed to connect to MQTT server due to exception: %s", err) self._async_connection_result(False) finally: diff --git a/tests/components/mqtt/test_client.py b/tests/components/mqtt/test_client.py index 4bfcde752ae..1878045a9b9 100644 --- a/tests/components/mqtt/test_client.py +++ b/tests/components/mqtt/test_client.py @@ -1403,8 +1403,15 @@ async def test_handle_mqtt_timeout_on_callback( assert not mock_debouncer.is_set() +@pytest.mark.parametrize( + "exception", + [ + OSError("Connection error"), + paho_mqtt.WebsocketConnectionError("Connection error"), + ], +) async def test_setup_raises_config_entry_not_ready_if_no_connect_broker( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, exception: Exception ) -> None: """Test for setup failure if connection to broker is missing.""" entry = MockConfigEntry(domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"}) @@ -1413,7 +1420,7 @@ async def test_setup_raises_config_entry_not_ready_if_no_connect_broker( with patch( "homeassistant.components.mqtt.async_client.AsyncMQTTClient" ) as mock_client: - mock_client().connect = MagicMock(side_effect=OSError("Connection error")) + mock_client().connect = 
MagicMock(side_effect=exception) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() assert "Failed to connect to MQTT server due to exception:" in caplog.text From 64aba0c1a372a2c13f68f2edd3170fec93a1cf5d Mon Sep 17 00:00:00 2001 From: Quentame Date: Fri, 20 Dec 2024 00:48:03 +0100 Subject: [PATCH 490/677] Bump Freebox to 1.2.1 (#133455) --- homeassistant/components/freebox/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/freebox/manifest.json b/homeassistant/components/freebox/manifest.json index ad7da1703b8..46422cee105 100644 --- a/homeassistant/components/freebox/manifest.json +++ b/homeassistant/components/freebox/manifest.json @@ -7,6 +7,6 @@ "documentation": "https://www.home-assistant.io/integrations/freebox", "iot_class": "local_polling", "loggers": ["freebox_api"], - "requirements": ["freebox-api==1.1.0"], + "requirements": ["freebox-api==1.2.1"], "zeroconf": ["_fbx-api._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 1f40c8d1612..79cf3658b9f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -940,7 +940,7 @@ forecast-solar==4.0.0 fortiosapi==1.0.5 # homeassistant.components.freebox -freebox-api==1.1.0 +freebox-api==1.2.1 # homeassistant.components.free_mobile freesms==0.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c82b937f1b0..589ed932ebd 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -796,7 +796,7 @@ foobot_async==1.0.0 forecast-solar==4.0.0 # homeassistant.components.freebox -freebox-api==1.1.0 +freebox-api==1.2.1 # homeassistant.components.fritz # homeassistant.components.fritzbox_callmonitor From afae257a129b2b2d50b0448b9923a436717f47d6 Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Fri, 20 Dec 2024 01:14:48 +0100 Subject: [PATCH 
491/677] Bump PyViCare to 2.39.1 (#133619) --- homeassistant/components/vicare/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/vicare/manifest.json b/homeassistant/components/vicare/manifest.json index 72bc3de53d8..98ff6ce4c82 100644 --- a/homeassistant/components/vicare/manifest.json +++ b/homeassistant/components/vicare/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/vicare", "iot_class": "cloud_polling", "loggers": ["PyViCare"], - "requirements": ["PyViCare==2.39.0"] + "requirements": ["PyViCare==2.39.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 79cf3658b9f..a4f61fde797 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -100,7 +100,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare==2.39.0 +PyViCare==2.39.1 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 589ed932ebd..d374203a614 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -94,7 +94,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare==2.39.0 +PyViCare==2.39.1 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 From 3d20c5c5d613bf27e1898ce0d9a6a450ebb54199 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 19 Dec 2024 21:24:47 -1000 Subject: [PATCH 492/677] Remove lower bound for history start time state query (#133607) Remove lower bound for start time state query With the new query in #133553 we do not need a lower bound on the search since it will always use index now and we always want the newest value in the index before the provided timestamp. The lower bound is redudant at this point as it will always be older than the oldest time point for the state. 
It only made sense when the query would have had to examine a time window of states instead of doing an index only search. --- .../components/recorder/history/modern.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index a8902e184ec..2d8f4da5f38 100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -182,7 +182,6 @@ def _significant_states_stmt( unioned_subquery = union_all( _select_from_subquery( _get_start_time_state_stmt( - run_start_ts, start_time_ts, single_metadata_id, metadata_ids, @@ -352,11 +351,12 @@ def _state_changed_during_period_stmt( ) if limit: stmt = stmt.limit(limit) - stmt = stmt.order_by( - States.metadata_id, - States.last_updated_ts, - ) + stmt = stmt.order_by(States.metadata_id, States.last_updated_ts) if not include_start_time_state or not run_start_ts: + # If we do not need the start time state or the + # oldest possible timestamp is newer than the start time + # we can return the statement as is as there will + # never be a start time state. 
return stmt return _select_from_subquery( union_all( @@ -555,7 +555,6 @@ def get_last_state_changes( def _get_start_time_state_for_entities_stmt( - run_start_ts: float, epoch_time: float, metadata_ids: list[int], no_attributes: bool, @@ -583,7 +582,6 @@ def _get_start_time_state_for_entities_stmt( .where( (StatesMeta.metadata_id == States.metadata_id) & (States.last_updated_ts < epoch_time) - & (States.last_updated_ts >= run_start_ts) ) .order_by(States.last_updated_ts.desc()) .limit(1) @@ -617,7 +615,6 @@ def _get_oldest_possible_ts( def _get_start_time_state_stmt( - run_start_ts: float, epoch_time: float, single_metadata_id: int | None, metadata_ids: list[int], @@ -638,7 +635,6 @@ def _get_start_time_state_stmt( # We have more than one entity to look at so we need to do a query on states # since the last recorder run started. return _get_start_time_state_for_entities_stmt( - run_start_ts, epoch_time, metadata_ids, no_attributes, From 26212798a334e208a35a0c6dfc0dc495d149fa40 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Fri, 20 Dec 2024 08:25:08 +0100 Subject: [PATCH 493/677] Fixes and code cleanup for IronOS integration (#133579) * Fix typing and cleanup in IronOS integration * fix test not using freezer * changes * fix timedelta --- homeassistant/components/iron_os/entity.py | 14 +++++++------- homeassistant/components/iron_os/number.py | 12 +++++------- homeassistant/components/iron_os/select.py | 8 +++----- tests/components/iron_os/test_init.py | 6 ++++-- 4 files changed, 19 insertions(+), 21 deletions(-) diff --git a/homeassistant/components/iron_os/entity.py b/homeassistant/components/iron_os/entity.py index 684957a2197..190a9f33639 100644 --- a/homeassistant/components/iron_os/entity.py +++ b/homeassistant/components/iron_os/entity.py @@ -2,29 +2,28 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING from homeassistant.helpers.device_registry import 
CONNECTION_BLUETOOTH, DeviceInfo from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import MANUFACTURER, MODEL -from .coordinator import IronOSBaseCoordinator +from .coordinator import IronOSLiveDataCoordinator -class IronOSBaseEntity(CoordinatorEntity[IronOSBaseCoordinator]): +class IronOSBaseEntity(CoordinatorEntity[IronOSLiveDataCoordinator]): """Base IronOS entity.""" _attr_has_entity_name = True def __init__( self, - coordinator: IronOSBaseCoordinator, + coordinator: IronOSLiveDataCoordinator, entity_description: EntityDescription, - context: Any | None = None, ) -> None: """Initialize the sensor.""" - super().__init__(coordinator, context=context) + super().__init__(coordinator) self.entity_description = entity_description self._attr_unique_id = ( @@ -32,7 +31,8 @@ class IronOSBaseEntity(CoordinatorEntity[IronOSBaseCoordinator]): ) if TYPE_CHECKING: assert coordinator.config_entry.unique_id - self.device_info = DeviceInfo( + + self._attr_device_info = DeviceInfo( connections={(CONNECTION_BLUETOOTH, coordinator.config_entry.unique_id)}, manufacturer=MANUFACTURER, model=MODEL, diff --git a/homeassistant/components/iron_os/number.py b/homeassistant/components/iron_os/number.py index a288a61b021..583844223dd 100644 --- a/homeassistant/components/iron_os/number.py +++ b/homeassistant/components/iron_os/number.py @@ -336,10 +336,10 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up number entities from a config entry.""" - coordinator = entry.runtime_data + coordinators = entry.runtime_data async_add_entities( - IronOSNumberEntity(coordinator, description) + IronOSNumberEntity(coordinators, description) for description in PINECIL_NUMBER_DESCRIPTIONS ) @@ -351,15 +351,13 @@ class IronOSNumberEntity(IronOSBaseEntity, NumberEntity): def __init__( self, - coordinator: IronOSCoordinators, + coordinators: IronOSCoordinators, 
entity_description: IronOSNumberEntityDescription, ) -> None: """Initialize the number entity.""" - super().__init__( - coordinator.live_data, entity_description, entity_description.characteristic - ) + super().__init__(coordinators.live_data, entity_description) - self.settings = coordinator.settings + self.settings = coordinators.settings async def async_set_native_value(self, value: float) -> None: """Update the current value.""" diff --git a/homeassistant/components/iron_os/select.py b/homeassistant/components/iron_os/select.py index c863e076f0b..10d8a6fcef5 100644 --- a/homeassistant/components/iron_os/select.py +++ b/homeassistant/components/iron_os/select.py @@ -164,15 +164,13 @@ class IronOSSelectEntity(IronOSBaseEntity, SelectEntity): def __init__( self, - coordinator: IronOSCoordinators, + coordinators: IronOSCoordinators, entity_description: IronOSSelectEntityDescription, ) -> None: """Initialize the select entity.""" - super().__init__( - coordinator.live_data, entity_description, entity_description.characteristic - ) + super().__init__(coordinators.live_data, entity_description) - self.settings = coordinator.settings + self.settings = coordinators.settings @property def current_option(self) -> str | None: diff --git a/tests/components/iron_os/test_init.py b/tests/components/iron_os/test_init.py index 21194a55eea..15327c55121 100644 --- a/tests/components/iron_os/test_init.py +++ b/tests/components/iron_os/test_init.py @@ -1,6 +1,6 @@ """Test init of IronOS integration.""" -from datetime import datetime, timedelta +from datetime import timedelta from unittest.mock import AsyncMock from freezegun.api import FrozenDateTimeFactory @@ -73,6 +73,7 @@ async def test_settings_exception( hass: HomeAssistant, config_entry: MockConfigEntry, mock_pynecil: AsyncMock, + freezer: FrozenDateTimeFactory, ) -> None: """Test skipping of settings on exception.""" mock_pynecil.get_settings.side_effect = CommunicationError @@ -80,7 +81,8 @@ async def 
test_settings_exception( config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - async_fire_time_changed(hass, datetime.now() + timedelta(seconds=60)) + freezer.tick(timedelta(seconds=60)) + async_fire_time_changed(hass) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED From ad34bc89101f16a3a8b5ebf55ad45fa133548456 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Fri, 20 Dec 2024 08:26:36 +0100 Subject: [PATCH 494/677] Add min/max price sensor to Nord Pool (#133534) * Add min/max price sensor to Nord Pool * Last fixes * Make link in strings * Replace func --- homeassistant/components/nordpool/sensor.py | 75 ++++++- .../components/nordpool/strings.json | 22 ++ .../nordpool/snapshots/test_sensor.ambr | 208 ++++++++++++++++++ 3 files changed, 298 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/nordpool/sensor.py b/homeassistant/components/nordpool/sensor.py index 47617cc8e42..fe966e99168 100644 --- a/homeassistant/components/nordpool/sensor.py +++ b/homeassistant/components/nordpool/sensor.py @@ -27,6 +27,20 @@ from .entity import NordpoolBaseEntity PARALLEL_UPDATES = 0 +def validate_prices( + func: Callable[ + [DeliveryPeriodData], dict[str, tuple[float | None, float, float | None]] + ], + data: DeliveryPeriodData, + area: str, + index: int, +) -> float | None: + """Validate and return.""" + if result := func(data)[area][index]: + return result / 1000 + return None + + def get_prices( data: DeliveryPeriodData, ) -> dict[str, tuple[float | None, float, float | None]]: @@ -67,6 +81,26 @@ def get_prices( return result +def get_min_max_price( + data: DeliveryPeriodData, + area: str, + func: Callable[[float, float], float], +) -> tuple[float, datetime, datetime]: + """Get the lowest price from the data.""" + price_data = data.entries + price: float = price_data[0].entry[area] + start: datetime = price_data[0].start + end: datetime = 
price_data[0].end + for entry in price_data: + for _area, _price in entry.entry.items(): + if _area == area and _price == func(price, _price): + price = _price + start = entry.start + end = entry.end + + return (price, start, end) + + def get_blockprices( data: DeliveryPeriodData, ) -> dict[str, dict[str, tuple[datetime, datetime, float, float, float]]]: @@ -103,7 +137,8 @@ class NordpoolDefaultSensorEntityDescription(SensorEntityDescription): class NordpoolPricesSensorEntityDescription(SensorEntityDescription): """Describes Nord Pool prices sensor entity.""" - value_fn: Callable[[tuple[float | None, float, float | None]], float | None] + value_fn: Callable[[DeliveryPeriodData, str], float | None] + extra_fn: Callable[[DeliveryPeriodData, str], dict[str, str] | None] @dataclass(frozen=True, kw_only=True) @@ -142,20 +177,43 @@ PRICES_SENSOR_TYPES: tuple[NordpoolPricesSensorEntityDescription, ...] = ( NordpoolPricesSensorEntityDescription( key="current_price", translation_key="current_price", - value_fn=lambda data: data[1] / 1000, + value_fn=lambda data, area: validate_prices(get_prices, data, area, 1), + extra_fn=lambda data, area: None, state_class=SensorStateClass.MEASUREMENT, suggested_display_precision=2, ), NordpoolPricesSensorEntityDescription( key="last_price", translation_key="last_price", - value_fn=lambda data: data[0] / 1000 if data[0] else None, + value_fn=lambda data, area: validate_prices(get_prices, data, area, 0), + extra_fn=lambda data, area: None, suggested_display_precision=2, ), NordpoolPricesSensorEntityDescription( key="next_price", translation_key="next_price", - value_fn=lambda data: data[2] / 1000 if data[2] else None, + value_fn=lambda data, area: validate_prices(get_prices, data, area, 2), + extra_fn=lambda data, area: None, + suggested_display_precision=2, + ), + NordpoolPricesSensorEntityDescription( + key="lowest_price", + translation_key="lowest_price", + value_fn=lambda data, area: get_min_max_price(data, area, min)[0] / 1000, + 
extra_fn=lambda data, area: { + "start": get_min_max_price(data, area, min)[1].isoformat(), + "end": get_min_max_price(data, area, min)[2].isoformat(), + }, + suggested_display_precision=2, + ), + NordpoolPricesSensorEntityDescription( + key="highest_price", + translation_key="highest_price", + value_fn=lambda data, area: get_min_max_price(data, area, max)[0] / 1000, + extra_fn=lambda data, area: { + "start": get_min_max_price(data, area, max)[1].isoformat(), + "end": get_min_max_price(data, area, max)[2].isoformat(), + }, suggested_display_precision=2, ), ) @@ -285,9 +343,12 @@ class NordpoolPriceSensor(NordpoolBaseEntity, SensorEntity): @property def native_value(self) -> float | None: """Return value of sensor.""" - return self.entity_description.value_fn( - get_prices(self.coordinator.data)[self.area] - ) + return self.entity_description.value_fn(self.coordinator.data, self.area) + + @property + def extra_state_attributes(self) -> dict[str, str] | None: + """Return the extra state attributes.""" + return self.entity_description.extra_fn(self.coordinator.data, self.area) class NordpoolBlockPriceSensor(NordpoolBaseEntity, SensorEntity): diff --git a/homeassistant/components/nordpool/strings.json b/homeassistant/components/nordpool/strings.json index d30898730b9..cc10a1a0640 100644 --- a/homeassistant/components/nordpool/strings.json +++ b/homeassistant/components/nordpool/strings.json @@ -50,6 +50,28 @@ "next_price": { "name": "Next price" }, + "lowest_price": { + "name": "Lowest price", + "state_attributes": { + "start": { + "name": "Start time" + }, + "end": { + "name": "End time" + } + } + }, + "highest_price": { + "name": "Highest price", + "state_attributes": { + "start": { + "name": "[%key:component::nordpool::entity::sensor::lowest_price::state_attributes::start::name%]" + }, + "end": { + "name": "[%key:component::nordpool::entity::sensor::lowest_price::state_attributes::end::name%]" + } + } + }, "block_average": { "name": "{block} average" }, diff --git 
a/tests/components/nordpool/snapshots/test_sensor.ambr b/tests/components/nordpool/snapshots/test_sensor.ambr index 01600352861..9b328c3a71d 100644 --- a/tests/components/nordpool/snapshots/test_sensor.ambr +++ b/tests/components/nordpool/snapshots/test_sensor.ambr @@ -200,6 +200,58 @@ 'state': '11.6402', }) # --- +# name: test_sensor[sensor.nord_pool_se3_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'highest_price', + 'unique_id': 'SE3-highest_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'end': '2024-11-05T17:00:00+00:00', + 'friendly_name': 'Nord Pool SE3 Highest price', + 'start': '2024-11-05T16:00:00+00:00', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.51265', + }) +# --- # name: test_sensor[sensor.nord_pool_se3_last_updated-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -247,6 +299,58 @@ 'state': '2024-11-04T12:15:03+00:00', }) # --- +# name: test_sensor[sensor.nord_pool_se3_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lowest_price', + 'unique_id': 'SE3-lowest_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'end': '2024-11-05T03:00:00+00:00', + 'friendly_name': 'Nord Pool SE3 Lowest price', + 'start': '2024-11-05T02:00:00+00:00', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.06169', + }) +# --- # name: test_sensor[sensor.nord_pool_se3_next_price-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1307,6 +1411,58 @@ 'state': '11.6402', }) # --- +# name: test_sensor[sensor.nord_pool_se4_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'highest_price', + 'unique_id': 'SE4-highest_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'end': '2024-11-05T17:00:00+00:00', + 'friendly_name': 'Nord Pool SE4 Highest price', + 'start': '2024-11-05T16:00:00+00:00', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.53303', + }) +# --- # name: test_sensor[sensor.nord_pool_se4_last_updated-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1354,6 +1510,58 @@ 'state': '2024-11-04T12:15:03+00:00', }) # --- +# name: test_sensor[sensor.nord_pool_se4_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lowest_price', + 'unique_id': 'SE4-lowest_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'end': '2024-11-05T03:00:00+00:00', + 'friendly_name': 'Nord Pool SE4 Lowest price', + 'start': '2024-11-05T02:00:00+00:00', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'0.06519', + }) +# --- # name: test_sensor[sensor.nord_pool_se4_next_price-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ From 10191e7a23acc4cc6c86aa86c72aa646ec711bbc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Joakim=20S=C3=B8rensen?= Date: Fri, 20 Dec 2024 08:55:00 +0100 Subject: [PATCH 495/677] Add async_register_backup_agents_listener to cloud/backup (#133584) * Add async_register_backup_agents_listener to cloud/backup * Coverage * more coverage --- homeassistant/components/cloud/backup.py | 30 ++++++++++++- homeassistant/components/cloud/const.py | 2 + homeassistant/components/cloud/http_api.py | 5 +++ tests/components/cloud/test_backup.py | 49 ++++++++++++++++++++++ tests/components/cloud/test_http_api.py | 42 +++++++++++++++++++ 5 files changed, 126 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/cloud/backup.py b/homeassistant/components/cloud/backup.py index e826c229321..d21e28be50a 100644 --- a/homeassistant/components/cloud/backup.py +++ b/homeassistant/components/cloud/backup.py @@ -3,7 +3,7 @@ from __future__ import annotations import base64 -from collections.abc import AsyncIterator, Callable, Coroutine +from collections.abc import AsyncIterator, Callable, Coroutine, Mapping import hashlib from typing import Any, Self @@ -18,9 +18,10 @@ from hass_nabucasa.cloud_api import ( from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.dispatcher import async_dispatcher_connect from .client import CloudClient -from .const import DATA_CLOUD, DOMAIN +from .const import DATA_CLOUD, DOMAIN, EVENT_CLOUD_EVENT _STORAGE_BACKUP = "backup" @@ -45,6 +46,31 @@ async def async_get_backup_agents( return [CloudBackupAgent(hass=hass, cloud=cloud)] +@callback +def async_register_backup_agents_listener( + hass: HomeAssistant, + *, + listener: Callable[[], None], + **kwargs: Any, +) -> Callable[[], None]: + """Register a listener to be 
called when agents are added or removed.""" + + @callback + def unsub() -> None: + """Unsubscribe from events.""" + unsub_signal() + + @callback + def handle_event(data: Mapping[str, Any]) -> None: + """Handle event.""" + if data["type"] not in ("login", "logout"): + return + listener() + + unsub_signal = async_dispatcher_connect(hass, EVENT_CLOUD_EVENT, handle_event) + return unsub + + class ChunkAsyncStreamIterator: """Async iterator for chunked streams. diff --git a/homeassistant/components/cloud/const.py b/homeassistant/components/cloud/const.py index 65d239f2b10..cff71bacebc 100644 --- a/homeassistant/components/cloud/const.py +++ b/homeassistant/components/cloud/const.py @@ -18,6 +18,8 @@ DATA_CLOUD: HassKey[Cloud[CloudClient]] = HassKey(DOMAIN) DATA_PLATFORMS_SETUP: HassKey[dict[str, asyncio.Event]] = HassKey( "cloud_platforms_setup" ) +EVENT_CLOUD_EVENT = "cloud_event" + REQUEST_TIMEOUT = 10 PREF_ENABLE_ALEXA = "alexa_enabled" diff --git a/homeassistant/components/cloud/http_api.py b/homeassistant/components/cloud/http_api.py index 2f49d261792..473f553593a 100644 --- a/homeassistant/components/cloud/http_api.py +++ b/homeassistant/components/cloud/http_api.py @@ -34,6 +34,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.util.location import async_detect_location_info from .alexa_config import entity_supported as entity_supported_by_alexa @@ -41,6 +42,7 @@ from .assist_pipeline import async_create_cloud_pipeline from .client import CloudClient from .const import ( DATA_CLOUD, + EVENT_CLOUD_EVENT, LOGIN_MFA_TIMEOUT, PREF_ALEXA_REPORT_STATE, PREF_DISABLE_2FA, @@ -278,6 +280,8 @@ class CloudLoginView(HomeAssistantView): new_cloud_pipeline_id = await 
async_create_cloud_pipeline(hass) else: new_cloud_pipeline_id = None + + async_dispatcher_send(hass, EVENT_CLOUD_EVENT, {"type": "login"}) return self.json({"success": True, "cloud_pipeline": new_cloud_pipeline_id}) @@ -297,6 +301,7 @@ class CloudLogoutView(HomeAssistantView): async with asyncio.timeout(REQUEST_TIMEOUT): await cloud.logout() + async_dispatcher_send(hass, EVENT_CLOUD_EVENT, {"type": "logout"}) return self.json_message("ok") diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py index 93747ca25f7..86b25d61d88 100644 --- a/tests/components/cloud/test_backup.py +++ b/tests/components/cloud/test_backup.py @@ -17,7 +17,10 @@ from homeassistant.components.backup import ( Folder, ) from homeassistant.components.cloud import DOMAIN +from homeassistant.components.cloud.backup import async_register_backup_agents_listener +from homeassistant.components.cloud.const import EVENT_CLOUD_EVENT from homeassistant.core import HomeAssistant +from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.setup import async_setup_component from tests.test_util.aiohttp import AiohttpClientMocker @@ -576,3 +579,49 @@ async def test_agents_delete_not_found( assert response["success"] assert response["result"] == {"agent_errors": {}} + + +@pytest.mark.parametrize("event_type", ["login", "logout"]) +async def test_calling_listener_on_login_logout( + hass: HomeAssistant, + event_type: str, +) -> None: + """Test calling listener for login and logout events.""" + listener = MagicMock() + async_register_backup_agents_listener(hass, listener=listener) + + assert listener.call_count == 0 + async_dispatcher_send(hass, EVENT_CLOUD_EVENT, {"type": event_type}) + await hass.async_block_till_done() + + assert listener.call_count == 1 + + +async def test_not_calling_listener_after_unsub(hass: HomeAssistant) -> None: + """Test only calling listener until unsub.""" + listener = MagicMock() + unsub = 
async_register_backup_agents_listener(hass, listener=listener) + + assert listener.call_count == 0 + async_dispatcher_send(hass, EVENT_CLOUD_EVENT, {"type": "login"}) + await hass.async_block_till_done() + assert listener.call_count == 1 + + unsub() + + async_dispatcher_send(hass, EVENT_CLOUD_EVENT, {"type": "login"}) + await hass.async_block_till_done() + assert listener.call_count == 1 + + +async def test_not_calling_listener_with_unknown_event_type( + hass: HomeAssistant, +) -> None: + """Test not calling listener if we did not get the expected event type.""" + listener = MagicMock() + async_register_backup_agents_listener(hass, listener=listener) + + assert listener.call_count == 0 + async_dispatcher_send(hass, EVENT_CLOUD_EVENT, {"type": "unknown"}) + await hass.async_block_till_done() + assert listener.call_count == 0 diff --git a/tests/components/cloud/test_http_api.py b/tests/components/cloud/test_http_api.py index b35cc03ac73..d915f158af0 100644 --- a/tests/components/cloud/test_http_api.py +++ b/tests/components/cloud/test_http_api.py @@ -1819,3 +1819,45 @@ async def test_api_calls_require_admin( resp = await client.post(endpoint, json=data) assert resp.status == HTTPStatus.UNAUTHORIZED + + +async def test_login_view_dispatch_event( + hass: HomeAssistant, + cloud: MagicMock, + hass_client: ClientSessionGenerator, +) -> None: + """Test dispatching event while logging in.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, DOMAIN, {"cloud": {}}) + await hass.async_block_till_done() + + cloud_client = await hass_client() + + with patch( + "homeassistant.components.cloud.http_api.async_dispatcher_send" + ) as async_dispatcher_send_mock: + await cloud_client.post( + "/api/cloud/login", json={"email": "my_username", "password": "my_password"} + ) + + assert async_dispatcher_send_mock.call_count == 1 + assert async_dispatcher_send_mock.mock_calls[0][1][1] == "cloud_event" + assert 
async_dispatcher_send_mock.mock_calls[0][1][2] == {"type": "login"} + + +async def test_logout_view_dispatch_event( + cloud: MagicMock, + setup_cloud: None, + hass_client: ClientSessionGenerator, +) -> None: + """Test dispatching event while logging out.""" + cloud_client = await hass_client() + + with patch( + "homeassistant.components.cloud.http_api.async_dispatcher_send" + ) as async_dispatcher_send_mock: + await cloud_client.post("/api/cloud/logout") + + assert async_dispatcher_send_mock.call_count == 1 + assert async_dispatcher_send_mock.mock_calls[0][1][1] == "cloud_event" + assert async_dispatcher_send_mock.mock_calls[0][1][2] == {"type": "logout"} From 7e6392f062a015c344a634a703c3e1224766d1dc Mon Sep 17 00:00:00 2001 From: Jonas Fors Lellky Date: Fri, 20 Dec 2024 10:11:50 +0100 Subject: [PATCH 496/677] Define setpoints as constants in flexit_bacnet (#133580) * Define setpoints as consts * Use a regular comment instead of docstring * Un-indent comment --- .../components/flexit_bacnet/number.py | 49 +++++++++++++------ 1 file changed, 35 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/flexit_bacnet/number.py b/homeassistant/components/flexit_bacnet/number.py index 029ce896445..6e405e8e8ac 100644 --- a/homeassistant/components/flexit_bacnet/number.py +++ b/homeassistant/components/flexit_bacnet/number.py @@ -23,6 +23,9 @@ from . import FlexitCoordinator from .const import DOMAIN from .entity import FlexitEntity +_MAX_FAN_SETPOINT = 100 +_MIN_FAN_SETPOINT = 30 + @dataclass(kw_only=True, frozen=True) class FlexitNumberEntityDescription(NumberEntityDescription): @@ -34,6 +37,24 @@ class FlexitNumberEntityDescription(NumberEntityDescription): set_native_value_fn: Callable[[FlexitBACnet], Callable[[int], Awaitable[None]]] +# Setpoints for Away, Home and High are dependent of each other. Fireplace and Cooker Hood +# have setpoints between 0 (MIN_FAN_SETPOINT) and 100 (MAX_FAN_SETPOINT). +# See the table below for all the setpoints. 
+# +# | Mode | Setpoint | Min | Max | +# |:------------|----------|:----------------------|:----------------------| +# | HOME | Supply | AWAY Supply setpoint | 100 | +# | HOME | Extract | AWAY Extract setpoint | 100 | +# | AWAY | Supply | 30 | HOME Supply setpoint | +# | AWAY | Extract | 30 | HOME Extract setpoint | +# | HIGH | Supply | HOME Supply setpoint | 100 | +# | HIGH | Extract | HOME Extract setpoint | 100 | +# | COOKER_HOOD | Supply | 30 | 100 | +# | COOKER_HOOD | Extract | 30 | 100 | +# | FIREPLACE | Supply | 30 | 100 | +# | FIREPLACE | Extract | 30 | 100 | + + NUMBERS: tuple[FlexitNumberEntityDescription, ...] = ( FlexitNumberEntityDescription( key="away_extract_fan_setpoint", @@ -45,7 +66,7 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] = ( set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_away, native_unit_of_measurement=PERCENTAGE, native_max_value_fn=lambda device: int(device.fan_setpoint_extract_air_home), - native_min_value_fn=lambda _: 30, + native_min_value_fn=lambda _: _MIN_FAN_SETPOINT, ), FlexitNumberEntityDescription( key="away_supply_fan_setpoint", @@ -57,7 +78,7 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] = ( set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_away, native_unit_of_measurement=PERCENTAGE, native_max_value_fn=lambda device: int(device.fan_setpoint_supply_air_home), - native_min_value_fn=lambda _: 30, + native_min_value_fn=lambda _: _MIN_FAN_SETPOINT, ), FlexitNumberEntityDescription( key="cooker_hood_extract_fan_setpoint", @@ -68,8 +89,8 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] 
= ( native_value_fn=lambda device: device.fan_setpoint_extract_air_cooker, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_cooker, native_unit_of_measurement=PERCENTAGE, - native_max_value_fn=lambda _: 100, - native_min_value_fn=lambda _: 30, + native_max_value_fn=lambda _: _MAX_FAN_SETPOINT, + native_min_value_fn=lambda _: _MIN_FAN_SETPOINT, ), FlexitNumberEntityDescription( key="cooker_hood_supply_fan_setpoint", @@ -80,8 +101,8 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] = ( native_value_fn=lambda device: device.fan_setpoint_supply_air_cooker, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_cooker, native_unit_of_measurement=PERCENTAGE, - native_max_value_fn=lambda _: 100, - native_min_value_fn=lambda _: 30, + native_max_value_fn=lambda _: _MAX_FAN_SETPOINT, + native_min_value_fn=lambda _: _MIN_FAN_SETPOINT, ), FlexitNumberEntityDescription( key="fireplace_extract_fan_setpoint", @@ -92,8 +113,8 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] = ( native_value_fn=lambda device: device.fan_setpoint_extract_air_fire, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_fire, native_unit_of_measurement=PERCENTAGE, - native_max_value_fn=lambda _: 100, - native_min_value_fn=lambda _: 30, + native_max_value_fn=lambda _: _MAX_FAN_SETPOINT, + native_min_value_fn=lambda _: _MIN_FAN_SETPOINT, ), FlexitNumberEntityDescription( key="fireplace_supply_fan_setpoint", @@ -104,8 +125,8 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] 
= ( native_value_fn=lambda device: device.fan_setpoint_supply_air_fire, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_fire, native_unit_of_measurement=PERCENTAGE, - native_max_value_fn=lambda _: 100, - native_min_value_fn=lambda _: 30, + native_max_value_fn=lambda _: _MAX_FAN_SETPOINT, + native_min_value_fn=lambda _: _MIN_FAN_SETPOINT, ), FlexitNumberEntityDescription( key="high_extract_fan_setpoint", @@ -116,7 +137,7 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] = ( native_value_fn=lambda device: device.fan_setpoint_extract_air_high, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_high, native_unit_of_measurement=PERCENTAGE, - native_max_value_fn=lambda _: 100, + native_max_value_fn=lambda _: _MAX_FAN_SETPOINT, native_min_value_fn=lambda device: int(device.fan_setpoint_extract_air_home), ), FlexitNumberEntityDescription( @@ -128,7 +149,7 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] = ( native_value_fn=lambda device: device.fan_setpoint_supply_air_high, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_high, native_unit_of_measurement=PERCENTAGE, - native_max_value_fn=lambda _: 100, + native_max_value_fn=lambda _: _MAX_FAN_SETPOINT, native_min_value_fn=lambda device: int(device.fan_setpoint_supply_air_home), ), FlexitNumberEntityDescription( @@ -140,7 +161,7 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] = ( native_value_fn=lambda device: device.fan_setpoint_extract_air_home, set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_home, native_unit_of_measurement=PERCENTAGE, - native_max_value_fn=lambda _: 100, + native_max_value_fn=lambda _: _MAX_FAN_SETPOINT, native_min_value_fn=lambda device: int(device.fan_setpoint_extract_air_away), ), FlexitNumberEntityDescription( @@ -152,7 +173,7 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] 
= ( native_value_fn=lambda device: device.fan_setpoint_supply_air_home, set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_home, native_unit_of_measurement=PERCENTAGE, - native_max_value_fn=lambda _: 100, + native_max_value_fn=lambda _: _MAX_FAN_SETPOINT, native_min_value_fn=lambda device: int(device.fan_setpoint_supply_air_away), ), ) From 6188db18c2dd82b3cd19c9cc60eb671e0d233202 Mon Sep 17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Sat, 14 Dec 2024 03:36:15 -0500 Subject: [PATCH 497/677] Bump `nice-go` to 1.0.0 (#133185) * Bump Nice G.O. to 1.0.0 * Mypy * Pytest --- homeassistant/components/nice_go/coordinator.py | 1 - homeassistant/components/nice_go/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/nice_go/fixtures/get_all_barriers.json | 4 ---- tests/components/nice_go/test_init.py | 1 - 6 files changed, 3 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/nice_go/coordinator.py b/homeassistant/components/nice_go/coordinator.py index 29c0d8233fe..07b20bbbf10 100644 --- a/homeassistant/components/nice_go/coordinator.py +++ b/homeassistant/components/nice_go/coordinator.py @@ -239,7 +239,6 @@ class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): ].type, # Device type is not sent in device state update, and it can't change, so we just reuse the existing one BarrierState( deviceId=raw_data["deviceId"], - desired=json.loads(raw_data["desired"]), reported=json.loads(raw_data["reported"]), connectionState=ConnectionState( connected=raw_data["connectionState"]["connected"], diff --git a/homeassistant/components/nice_go/manifest.json b/homeassistant/components/nice_go/manifest.json index 817d7ef9bc9..1af23ec4d9b 100644 --- a/homeassistant/components/nice_go/manifest.json +++ b/homeassistant/components/nice_go/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["nice_go"], - 
"requirements": ["nice-go==0.3.10"] + "requirements": ["nice-go==1.0.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 2858c92d182..aa9a05f447e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1460,7 +1460,7 @@ nextdns==4.0.0 nibe==2.13.0 # homeassistant.components.nice_go -nice-go==0.3.10 +nice-go==1.0.0 # homeassistant.components.niko_home_control niko-home-control==0.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f8565afc4b6..16ce4d87a62 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1220,7 +1220,7 @@ nextdns==4.0.0 nibe==2.13.0 # homeassistant.components.nice_go -nice-go==0.3.10 +nice-go==1.0.0 # homeassistant.components.nfandroidtv notifications-android-tv==0.1.5 diff --git a/tests/components/nice_go/fixtures/get_all_barriers.json b/tests/components/nice_go/fixtures/get_all_barriers.json index 84799e0dd32..5a7607612c1 100644 --- a/tests/components/nice_go/fixtures/get_all_barriers.json +++ b/tests/components/nice_go/fixtures/get_all_barriers.json @@ -11,7 +11,6 @@ ], "state": { "deviceId": "1", - "desired": { "key": "value" }, "reported": { "displayName": "Test Garage 1", "autoDisabled": false, @@ -42,7 +41,6 @@ ], "state": { "deviceId": "2", - "desired": { "key": "value" }, "reported": { "displayName": "Test Garage 2", "autoDisabled": false, @@ -73,7 +71,6 @@ ], "state": { "deviceId": "3", - "desired": { "key": "value" }, "reported": { "displayName": "Test Garage 3", "autoDisabled": false, @@ -101,7 +98,6 @@ ], "state": { "deviceId": "4", - "desired": { "key": "value" }, "reported": { "displayName": "Test Garage 4", "autoDisabled": false, diff --git a/tests/components/nice_go/test_init.py b/tests/components/nice_go/test_init.py index 4eb3851516e..051c6623b23 100644 --- a/tests/components/nice_go/test_init.py +++ b/tests/components/nice_go/test_init.py @@ -81,7 +81,6 @@ async def test_firmware_update_required( "displayName": "test-display-name", "migrationStatus": 
"NOT_STARTED", }, - desired=None, connectionState=None, version=None, timestamp=None, From 8400ef844146294c2514c4adfc8514912377cc2c Mon Sep 17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Wed, 18 Dec 2024 13:47:41 -0500 Subject: [PATCH 498/677] Add support for Nice G.O. HAE00080 wall station (#133186) --- homeassistant/components/nice_go/const.py | 4 ++-- homeassistant/components/nice_go/cover.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/nice_go/const.py b/homeassistant/components/nice_go/const.py index a6635368f7b..c02bcb3c234 100644 --- a/homeassistant/components/nice_go/const.py +++ b/homeassistant/components/nice_go/const.py @@ -15,8 +15,8 @@ CONF_REFRESH_TOKEN_CREATION_TIME = "refresh_token_creation_time" REFRESH_TOKEN_EXPIRY_TIME = timedelta(days=30) SUPPORTED_DEVICE_TYPES = { - Platform.LIGHT: ["WallStation"], - Platform.SWITCH: ["WallStation"], + Platform.LIGHT: ["WallStation", "WallStation_ESP32"], + Platform.SWITCH: ["WallStation", "WallStation_ESP32"], } KNOWN_UNSUPPORTED_DEVICE_TYPES = { Platform.LIGHT: ["Mms100"], diff --git a/homeassistant/components/nice_go/cover.py b/homeassistant/components/nice_go/cover.py index a823e931804..6360e398b96 100644 --- a/homeassistant/components/nice_go/cover.py +++ b/homeassistant/components/nice_go/cover.py @@ -21,6 +21,7 @@ from .entity import NiceGOEntity DEVICE_CLASSES = { "WallStation": CoverDeviceClass.GARAGE, "Mms100": CoverDeviceClass.GATE, + "WallStation_ESP32": CoverDeviceClass.GARAGE, } PARALLEL_UPDATES = 1 From 59e6fa5138dcfa7b7e58d9ac8ce21f6e869d9b0f Mon Sep 17 00:00:00 2001 From: Ron Weikamp <15732230+ronweikamp@users.noreply.github.com> Date: Wed, 18 Dec 2024 10:41:46 +0100 Subject: [PATCH 499/677] Bugfix: also schedule time based integration when source is 0 (#133438) * Bugfix also schedule time based integration when source is 0 * Update tests/components/integration/test_sensor.py Co-authored-by: Diogo Gomes * 
Improve comment in test. Remove redundant assertion. --------- Co-authored-by: Diogo Gomes --- .../components/integration/sensor.py | 2 +- tests/components/integration/test_sensor.py | 33 +++++++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/integration/sensor.py b/homeassistant/components/integration/sensor.py index a053e5cea5c..27aa74d0785 100644 --- a/homeassistant/components/integration/sensor.py +++ b/homeassistant/components/integration/sensor.py @@ -576,7 +576,7 @@ class IntegrationSensor(RestoreSensor): if ( self._max_sub_interval is not None and source_state is not None - and (source_state_dec := _decimal_state(source_state.state)) + and (source_state_dec := _decimal_state(source_state.state)) is not None ): @callback diff --git a/tests/components/integration/test_sensor.py b/tests/components/integration/test_sensor.py index 974c8bb8691..07390cd9571 100644 --- a/tests/components/integration/test_sensor.py +++ b/tests/components/integration/test_sensor.py @@ -843,6 +843,39 @@ async def test_on_valid_source_expect_update_on_time( assert float(state.state) < 1.8 +async def test_on_0_source_expect_0_and_update_when_source_gets_positive( + hass: HomeAssistant, +) -> None: + """Test whether time based integration updates the integral on a valid zero source.""" + start_time = dt_util.utcnow() + + with freeze_time(start_time) as freezer: + await _setup_integral_sensor(hass, max_sub_interval=DEFAULT_MAX_SUB_INTERVAL) + await _update_source_sensor(hass, 0) + await hass.async_block_till_done() + + # wait one minute and one second + freezer.tick(61) + async_fire_time_changed(hass, dt_util.now()) + await hass.async_block_till_done() + + state = hass.states.get("sensor.integration") + + assert condition.async_numeric_state(hass, state) is True + assert float(state.state) == 0 # integral is 0 after integration of 0 + + # wait one second and update state + freezer.tick(1) + async_fire_time_changed(hass, dt_util.now()) + 
await _update_source_sensor(hass, 100) + await hass.async_block_till_done() + + state = hass.states.get("sensor.integration") + + # approx 100*1/3600 (right method after 1 second since last integration) + assert 0.027 < float(state.state) < 0.029 + + async def test_on_unvailable_source_expect_no_update_on_time( hass: HomeAssistant, ) -> None: From cf4dbcfebf5caaa8895a15eda332f5a3ca401a28 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 17 Dec 2024 18:57:43 -0700 Subject: [PATCH 500/677] Ensure screenlogic retries if the protocol adapter is still booting (#133444) * Ensure screenlogic retries if the protocol adapter is still booting If the protocol adapter is still booting, it will disconnect and never retry ``` Traceback (most recent call last): File "/usr/src/homeassistant/homeassistant/config_entries.py", line 640, in __async_setup_with_context result = await component.async_setup_entry(hass, self) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/src/homeassistant/homeassistant/components/screenlogic/__init__.py", line 65, in async_setup_entry await gateway.async_connect(**connect_info) File "/usr/local/lib/python3.13/site-packages/screenlogicpy/gateway.py", line 142, in async_connect connectPkg = await async_connect_to_gateway( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ...<4 lines>... ) ^ File "/usr/local/lib/python3.13/site-packages/screenlogicpy/requests/login.py", line 107, in async_connect_to_gateway mac_address = await async_gateway_connect(transport, protocol, max_retries) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.13/site-packages/screenlogicpy/requests/login.py", line 77, in async_gateway_connect raise ScreenLogicConnectionError("Host unexpectedly disconnected.") screenlogicpy.const.common.ScreenLogicConnectionError: Host unexpectedly disconnected. 
``` * coverage --- .../components/screenlogic/__init__.py | 3 +- tests/components/screenlogic/test_init.py | 36 ++++++++++++++++++- 2 files changed, 37 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/screenlogic/__init__.py b/homeassistant/components/screenlogic/__init__.py index 6f58e9b3666..972837f7d75 100644 --- a/homeassistant/components/screenlogic/__init__.py +++ b/homeassistant/components/screenlogic/__init__.py @@ -4,6 +4,7 @@ import logging from typing import Any from screenlogicpy import ScreenLogicError, ScreenLogicGateway +from screenlogicpy.const.common import ScreenLogicConnectionError from screenlogicpy.const.data import SHARED_VALUES from homeassistant.config_entries import ConfigEntry @@ -64,7 +65,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ScreenLogicConfigEntry) try: await gateway.async_connect(**connect_info) await gateway.async_update() - except ScreenLogicError as ex: + except (ScreenLogicConnectionError, ScreenLogicError) as ex: raise ConfigEntryNotReady(ex.msg) from ex coordinator = ScreenlogicDataUpdateCoordinator( diff --git a/tests/components/screenlogic/test_init.py b/tests/components/screenlogic/test_init.py index 6416c93f779..f21a1118b4f 100644 --- a/tests/components/screenlogic/test_init.py +++ b/tests/components/screenlogic/test_init.py @@ -4,12 +4,14 @@ from dataclasses import dataclass from unittest.mock import DEFAULT, patch import pytest -from screenlogicpy import ScreenLogicGateway +from screenlogicpy import ScreenLogicError, ScreenLogicGateway +from screenlogicpy.const.common import ScreenLogicConnectionError from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN from homeassistant.components.screenlogic import DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from 
homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util import slugify @@ -284,3 +286,35 @@ async def test_platform_setup( for entity_id in tested_entity_ids: assert hass.states.get(entity_id) is not None + + +@pytest.mark.parametrize( + "exception", + [ScreenLogicConnectionError, ScreenLogicError], +) +async def test_retry_on_connect_exception( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, exception: Exception +) -> None: + """Test setup retries on expected exceptions.""" + + def stub_connect(*args, **kwargs): + raise exception + + mock_config_entry.add_to_hass(hass) + + with ( + patch( + GATEWAY_DISCOVERY_IMPORT_PATH, + return_value={}, + ), + patch.multiple( + ScreenLogicGateway, + async_connect=stub_connect, + is_connected=False, + _async_connected_request=DEFAULT, + ), + ): + assert not await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY From 709d15a79b2b694014d1aa5cf85d2237635ec428 Mon Sep 17 00:00:00 2001 From: Quentame Date: Fri, 20 Dec 2024 00:48:03 +0100 Subject: [PATCH 501/677] Bump Freebox to 1.2.1 (#133455) --- homeassistant/components/freebox/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/freebox/manifest.json b/homeassistant/components/freebox/manifest.json index ad7da1703b8..46422cee105 100644 --- a/homeassistant/components/freebox/manifest.json +++ b/homeassistant/components/freebox/manifest.json @@ -7,6 +7,6 @@ "documentation": "https://www.home-assistant.io/integrations/freebox", "iot_class": "local_polling", "loggers": ["freebox_api"], - "requirements": ["freebox-api==1.1.0"], + "requirements": ["freebox-api==1.2.1"], "zeroconf": ["_fbx-api._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index aa9a05f447e..bf57f6e6223 100644 --- 
a/requirements_all.txt +++ b/requirements_all.txt @@ -937,7 +937,7 @@ forecast-solar==4.0.0 fortiosapi==1.0.5 # homeassistant.components.freebox -freebox-api==1.1.0 +freebox-api==1.2.1 # homeassistant.components.free_mobile freesms==0.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 16ce4d87a62..38ab0abd377 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -793,7 +793,7 @@ foobot_async==1.0.0 forecast-solar==4.0.0 # homeassistant.components.freebox -freebox-api==1.1.0 +freebox-api==1.2.1 # homeassistant.components.fritz # homeassistant.components.fritzbox_callmonitor From 1afeabfd6418cf572d81297f3a1851d315944fe3 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Wed, 18 Dec 2024 10:19:57 +0100 Subject: [PATCH 502/677] Bump pyOverkiz to 1.15.3 (#133458) --- homeassistant/components/overkiz/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/overkiz/manifest.json b/homeassistant/components/overkiz/manifest.json index 8c750aec6bd..9ab901d5005 100644 --- a/homeassistant/components/overkiz/manifest.json +++ b/homeassistant/components/overkiz/manifest.json @@ -20,7 +20,7 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"], - "requirements": ["pyoverkiz==1.15.0"], + "requirements": ["pyoverkiz==1.15.3"], "zeroconf": [ { "type": "_kizbox._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index bf57f6e6223..bec7a6e8a80 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2149,7 +2149,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.15.0 +pyoverkiz==1.15.3 # homeassistant.components.onewire pyownet==0.10.0.post1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 38ab0abd377..91be1265314 100644 --- a/requirements_test_all.txt +++ 
b/requirements_test_all.txt @@ -1736,7 +1736,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.15.0 +pyoverkiz==1.15.3 # homeassistant.components.onewire pyownet==0.10.0.post1 From 92f50c63b1ad2e41dc9776aa552edd8d5f6a24e0 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Wed, 18 Dec 2024 11:05:52 +0100 Subject: [PATCH 503/677] Don't raise Overkiz user flow unique_id check (#133471) --- homeassistant/components/overkiz/config_flow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/overkiz/config_flow.py b/homeassistant/components/overkiz/config_flow.py index 471a13d0de2..3829fb3160d 100644 --- a/homeassistant/components/overkiz/config_flow.py +++ b/homeassistant/components/overkiz/config_flow.py @@ -76,7 +76,7 @@ class OverkizConfigFlow(ConfigFlow, domain=DOMAIN): for gateway in gateways: if is_overkiz_gateway(gateway.id): gateway_id = gateway.id - await self.async_set_unique_id(gateway_id) + await self.async_set_unique_id(gateway_id, raise_on_progress=False) return user_input From 0140aa7240be0b705470795a93eb92897177a3fa Mon Sep 17 00:00:00 2001 From: Luke Lashley Date: Wed, 18 Dec 2024 10:22:39 -0500 Subject: [PATCH 504/677] Update Roborock to 2.8.1 (#133492) --- homeassistant/components/roborock/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/roborock/manifest.json b/homeassistant/components/roborock/manifest.json index c305e4710fc..69d867aa164 100644 --- a/homeassistant/components/roborock/manifest.json +++ b/homeassistant/components/roborock/manifest.json @@ -7,7 +7,7 @@ "iot_class": "local_polling", "loggers": ["roborock"], "requirements": [ - "python-roborock==2.7.2", + "python-roborock==2.8.1", "vacuum-map-parser-roborock==0.1.2" ] } diff --git a/requirements_all.txt b/requirements_all.txt index bec7a6e8a80..c3d35988dc7 100644 --- a/requirements_all.txt +++ 
b/requirements_all.txt @@ -2402,7 +2402,7 @@ python-rabbitair==0.0.8 python-ripple-api==0.0.3 # homeassistant.components.roborock -python-roborock==2.7.2 +python-roborock==2.8.1 # homeassistant.components.smarttub python-smarttub==0.0.38 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 91be1265314..c873ef8884b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1923,7 +1923,7 @@ python-picnic-api==1.1.0 python-rabbitair==0.0.8 # homeassistant.components.roborock -python-roborock==2.7.2 +python-roborock==2.8.1 # homeassistant.components.smarttub python-smarttub==0.0.38 From cd5a46f11daabcd1fc102c44ff2885e2d5dd0aa2 Mon Sep 17 00:00:00 2001 From: Joakim Plate Date: Wed, 18 Dec 2024 16:53:15 +0100 Subject: [PATCH 505/677] =?UTF-8?q?Update=20fj=C3=A4r=C3=A5skupan=20to=202?= =?UTF-8?q?.3.1=20(#133493)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- homeassistant/components/fjaraskupan/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/fjaraskupan/manifest.json b/homeassistant/components/fjaraskupan/manifest.json index 91c74b68e01..cc368b3e92f 100644 --- a/homeassistant/components/fjaraskupan/manifest.json +++ b/homeassistant/components/fjaraskupan/manifest.json @@ -14,5 +14,5 @@ "documentation": "https://www.home-assistant.io/integrations/fjaraskupan", "iot_class": "local_polling", "loggers": ["bleak", "fjaraskupan"], - "requirements": ["fjaraskupan==2.3.0"] + "requirements": ["fjaraskupan==2.3.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index c3d35988dc7..fa07c4e10cc 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -912,7 +912,7 @@ fivem-api==0.1.2 fixerio==1.0.0a0 # homeassistant.components.fjaraskupan -fjaraskupan==2.3.0 +fjaraskupan==2.3.1 # homeassistant.components.flexit_bacnet flexit_bacnet==2.2.1 diff --git 
a/requirements_test_all.txt b/requirements_test_all.txt index c873ef8884b..3f444c9a59b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -771,7 +771,7 @@ fitbit==0.3.1 fivem-api==0.1.2 # homeassistant.components.fjaraskupan -fjaraskupan==2.3.0 +fjaraskupan==2.3.1 # homeassistant.components.flexit_bacnet flexit_bacnet==2.2.1 From f8e1a786beec662f8798f6508823a87016112da7 Mon Sep 17 00:00:00 2001 From: Joakim Plate Date: Wed, 18 Dec 2024 18:03:27 +0100 Subject: [PATCH 506/677] =?UTF-8?q?Update=20fj=C3=A4r=C3=A5skupan=20to=202?= =?UTF-8?q?.3.2=20(#133499)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- homeassistant/components/fjaraskupan/light.py | 3 --- homeassistant/components/fjaraskupan/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 3 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/fjaraskupan/light.py b/homeassistant/components/fjaraskupan/light.py index b33904c805d..f0083591d4d 100644 --- a/homeassistant/components/fjaraskupan/light.py +++ b/homeassistant/components/fjaraskupan/light.py @@ -4,8 +4,6 @@ from __future__ import annotations from typing import Any -from fjaraskupan import COMMAND_LIGHT_ON_OFF - from homeassistant.components.light import ATTR_BRIGHTNESS, ColorMode, LightEntity from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -62,7 +60,6 @@ class Light(CoordinatorEntity[FjaraskupanCoordinator], LightEntity): if self.is_on: async with self.coordinator.async_connect_and_update() as device: await device.send_dim(0) - await device.send_command(COMMAND_LIGHT_ON_OFF) @property def is_on(self) -> bool: diff --git a/homeassistant/components/fjaraskupan/manifest.json b/homeassistant/components/fjaraskupan/manifest.json index cc368b3e92f..2fd49aac5ee 100644 --- a/homeassistant/components/fjaraskupan/manifest.json +++ b/homeassistant/components/fjaraskupan/manifest.json 
@@ -14,5 +14,5 @@ "documentation": "https://www.home-assistant.io/integrations/fjaraskupan", "iot_class": "local_polling", "loggers": ["bleak", "fjaraskupan"], - "requirements": ["fjaraskupan==2.3.1"] + "requirements": ["fjaraskupan==2.3.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index fa07c4e10cc..b2a8f8fcade 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -912,7 +912,7 @@ fivem-api==0.1.2 fixerio==1.0.0a0 # homeassistant.components.fjaraskupan -fjaraskupan==2.3.1 +fjaraskupan==2.3.2 # homeassistant.components.flexit_bacnet flexit_bacnet==2.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3f444c9a59b..dac319fca5f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -771,7 +771,7 @@ fitbit==0.3.1 fivem-api==0.1.2 # homeassistant.components.fjaraskupan -fjaraskupan==2.3.1 +fjaraskupan==2.3.2 # homeassistant.components.flexit_bacnet flexit_bacnet==2.2.1 From 13f32c6720b4abc622c1866d03583dc0e5005946 Mon Sep 17 00:00:00 2001 From: Joakim Plate Date: Wed, 18 Dec 2024 19:39:35 +0100 Subject: [PATCH 507/677] Bump gardena_bluetooth to 1.5.0 (#133502) --- homeassistant/components/gardena_bluetooth/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/gardena_bluetooth/manifest.json b/homeassistant/components/gardena_bluetooth/manifest.json index da5c08c38c5..28bba1015f5 100644 --- a/homeassistant/components/gardena_bluetooth/manifest.json +++ b/homeassistant/components/gardena_bluetooth/manifest.json @@ -14,5 +14,5 @@ "documentation": "https://www.home-assistant.io/integrations/gardena_bluetooth", "iot_class": "local_polling", "loggers": ["bleak", "bleak_esphome", "gardena_bluetooth"], - "requirements": ["gardena-bluetooth==1.4.4"] + "requirements": ["gardena-bluetooth==1.5.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index b2a8f8fcade..6f824059923 100644 --- 
a/requirements_all.txt +++ b/requirements_all.txt @@ -953,7 +953,7 @@ fyta_cli==0.7.0 gTTS==2.2.4 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.4.4 +gardena-bluetooth==1.5.0 # homeassistant.components.google_assistant_sdk gassist-text==0.0.11 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index dac319fca5f..e82e13e934a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -806,7 +806,7 @@ fyta_cli==0.7.0 gTTS==2.2.4 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.4.4 +gardena-bluetooth==1.5.0 # homeassistant.components.google_assistant_sdk gassist-text==0.0.11 From 367749d93ce95ed44a43c086a41ad3537181283f Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 18 Dec 2024 12:48:39 -1000 Subject: [PATCH 508/677] Bump aiohttp to 3.11.11 (#133530) --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 5d7df8a2ff5..4906e479812 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -5,7 +5,7 @@ aiodiscover==2.1.0 aiodns==3.2.0 aiohasupervisor==0.2.1 aiohttp-fast-zlib==0.2.0 -aiohttp==3.11.10 +aiohttp==3.11.11 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 diff --git a/pyproject.toml b/pyproject.toml index 6b640bce4d0..9ddbb5be347 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ dependencies = [ # change behavior based on presence of supervisor. 
Deprecated with #127228 # Lib can be removed with 2025.11 "aiohasupervisor==0.2.1", - "aiohttp==3.11.10", + "aiohttp==3.11.11", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.2.0", "aiozoneinfo==0.2.1", diff --git a/requirements.txt b/requirements.txt index ad3cff221f7..8e3e8c06882 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,7 +5,7 @@ # Home Assistant Core aiodns==3.2.0 aiohasupervisor==0.2.1 -aiohttp==3.11.10 +aiohttp==3.11.11 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.2.0 aiozoneinfo==0.2.1 From 8c1a18b383c8414dde275abc1e4998ad9dca8695 Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Thu, 19 Dec 2024 19:00:18 +0100 Subject: [PATCH 509/677] Handle null value for elapsed time in Music Assistant (#133597) --- homeassistant/components/music_assistant/media_player.py | 8 ++------ tests/components/music_assistant/fixtures/players.json | 2 +- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/music_assistant/media_player.py b/homeassistant/components/music_assistant/media_player.py index fdf3a0c0c48..2345643868c 100644 --- a/homeassistant/components/music_assistant/media_player.py +++ b/homeassistant/components/music_assistant/media_player.py @@ -566,17 +566,13 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity): # shuffle and repeat are not (yet) supported for external sources self._attr_shuffle = None self._attr_repeat = None - if TYPE_CHECKING: - assert player.elapsed_time is not None - self._attr_media_position = int(player.elapsed_time) + self._attr_media_position = int(player.elapsed_time or 0) self._attr_media_position_updated_at = ( utc_from_timestamp(player.elapsed_time_last_updated) if player.elapsed_time_last_updated else None ) - if TYPE_CHECKING: - assert player.elapsed_time is not None - self._prev_time = player.elapsed_time + self._prev_time = player.elapsed_time or 0 return if queue is None: diff --git a/tests/components/music_assistant/fixtures/players.json 
b/tests/components/music_assistant/fixtures/players.json index 2d8b88d0e8e..8a08a55dc45 100644 --- a/tests/components/music_assistant/fixtures/players.json +++ b/tests/components/music_assistant/fixtures/players.json @@ -20,7 +20,7 @@ "power", "enqueue" ], - "elapsed_time": 0, + "elapsed_time": null, "elapsed_time_last_updated": 0, "state": "idle", "volume_level": 20, From fdde9d3a52f252e00b8cd5d8661ba05a9ad09400 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 19 Dec 2024 20:27:08 +0100 Subject: [PATCH 510/677] Fix Twinkly raise on progress (#133601) --- .../components/twinkly/config_flow.py | 4 +- tests/components/twinkly/test_config_flow.py | 37 +++++++++++++++++++ 2 files changed, 40 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/twinkly/config_flow.py b/homeassistant/components/twinkly/config_flow.py index 68c455dc619..837bd9ccb6a 100644 --- a/homeassistant/components/twinkly/config_flow.py +++ b/homeassistant/components/twinkly/config_flow.py @@ -45,7 +45,9 @@ class TwinklyConfigFlow(ConfigFlow, domain=DOMAIN): except (TimeoutError, ClientError): errors[CONF_HOST] = "cannot_connect" else: - await self.async_set_unique_id(device_info[DEV_ID]) + await self.async_set_unique_id( + device_info[DEV_ID], raise_on_progress=False + ) self._abort_if_unique_id_configured() return self._create_entry_from_device(device_info, host) diff --git a/tests/components/twinkly/test_config_flow.py b/tests/components/twinkly/test_config_flow.py index 9b9aeafd082..8d8e955291e 100644 --- a/tests/components/twinkly/test_config_flow.py +++ b/tests/components/twinkly/test_config_flow.py @@ -5,6 +5,7 @@ from unittest.mock import patch from homeassistant import config_entries from homeassistant.components import dhcp from homeassistant.components.twinkly.const import DOMAIN as TWINKLY_DOMAIN +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_ID, CONF_MODEL, CONF_NAME from homeassistant.core import 
HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -157,3 +158,39 @@ async def test_dhcp_already_exists(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_user_flow_works_discovery(hass: HomeAssistant) -> None: + """Test user flow can continue after discovery happened.""" + client = ClientMock() + with ( + patch( + "homeassistant.components.twinkly.config_flow.Twinkly", return_value=client + ), + patch("homeassistant.components.twinkly.async_setup_entry", return_value=True), + ): + await hass.config_entries.flow.async_init( + TWINKLY_DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=dhcp.DhcpServiceInfo( + hostname="Twinkly_XYZ", + ip="1.2.3.4", + macaddress="aabbccddeeff", + ), + ) + result = await hass.config_entries.flow.async_init( + TWINKLY_DOMAIN, + context={"source": SOURCE_USER}, + ) + assert len(hass.config_entries.flow.async_progress(TWINKLY_DOMAIN)) == 2 + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "10.0.0.131"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + # Verify the discovery flow was aborted + assert not hass.config_entries.flow.async_progress(TWINKLY_DOMAIN) From ff9df15cb0bb26abfb3b229ee58a9f1646045df6 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 19 Dec 2024 10:39:39 -1000 Subject: [PATCH 511/677] Handle mqtt.WebsocketConnectionError when connecting to the MQTT broker (#133610) fixes #132985 --- homeassistant/components/mqtt/client.py | 2 +- tests/components/mqtt/test_client.py | 11 +++++++++-- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index ee6f02912b2..0dcd7b2014b 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -661,7 +661,7 @@ class MQTT: self.conf.get(CONF_PORT, DEFAULT_PORT), self.conf.get(CONF_KEEPALIVE, DEFAULT_KEEPALIVE), ) - except OSError as err: + except (OSError, mqtt.WebsocketConnectionError) as err: _LOGGER.error("Failed to connect to MQTT server due to exception: %s", err) self._async_connection_result(False) finally: diff --git a/tests/components/mqtt/test_client.py b/tests/components/mqtt/test_client.py index 4bfcde752ae..1878045a9b9 100644 --- a/tests/components/mqtt/test_client.py +++ b/tests/components/mqtt/test_client.py @@ -1403,8 +1403,15 @@ async def test_handle_mqtt_timeout_on_callback( assert not mock_debouncer.is_set() +@pytest.mark.parametrize( + "exception", + [ + OSError("Connection error"), + paho_mqtt.WebsocketConnectionError("Connection error"), + ], +) async def test_setup_raises_config_entry_not_ready_if_no_connect_broker( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, exception: Exception ) -> None: """Test for setup failure if connection to broker is missing.""" entry = MockConfigEntry(domain=mqtt.DOMAIN, data={mqtt.CONF_BROKER: "test-broker"}) @@ -1413,7 +1420,7 @@ async def test_setup_raises_config_entry_not_ready_if_no_connect_broker( with patch( "homeassistant.components.mqtt.async_client.AsyncMQTTClient" ) as mock_client: - mock_client().connect = MagicMock(side_effect=OSError("Connection error")) + mock_client().connect = 
MagicMock(side_effect=exception) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() assert "Failed to connect to MQTT server due to exception:" in caplog.text From e7bdf1467bc67ff74203b6fe54f959b998870b8d Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 20 Dec 2024 09:51:57 +0000 Subject: [PATCH 512/677] Bump version to 2024.12.5 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 21f805bae72..417fa94e048 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -25,7 +25,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 12 -PATCH_VERSION: Final = "4" +PATCH_VERSION: Final = "5" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 9ddbb5be347..58a1e8c1659 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.12.4" +version = "2024.12.5" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From b391dfe6476386d2b713acc5c5f9ad5a6a25a17d Mon Sep 17 00:00:00 2001 From: Kenny Root Date: Fri, 20 Dec 2024 01:59:30 -0800 Subject: [PATCH 513/677] Switch to official Zabbix Python API (#131674) --- CODEOWNERS | 1 + homeassistant/components/zabbix/__init__.py | 26 +++++++++---------- homeassistant/components/zabbix/manifest.json | 6 ++--- homeassistant/components/zabbix/sensor.py | 2 +- requirements_all.txt | 6 ++--- 5 files changed, 21 insertions(+), 20 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index 382fbffecaa..0e2934b1f49 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1742,6 +1742,7 @@ build.json @home-assistant/supervisor /tests/components/youless/ @gjong /homeassistant/components/youtube/ @joostlek /tests/components/youtube/ @joostlek +/homeassistant/components/zabbix/ @kruton /homeassistant/components/zamg/ @killer0071234 /tests/components/zamg/ @killer0071234 /homeassistant/components/zengge/ @emontnemery diff --git a/homeassistant/components/zabbix/__init__.py b/homeassistant/components/zabbix/__init__.py index d9bab3e6fe4..05881d649cf 100644 --- a/homeassistant/components/zabbix/__init__.py +++ b/homeassistant/components/zabbix/__init__.py @@ -11,8 +11,9 @@ import time from urllib.error import HTTPError from urllib.parse import urljoin -from pyzabbix import ZabbixAPI, ZabbixAPIException, ZabbixMetric, ZabbixSender import voluptuous as vol +from zabbix_utils import ItemValue, Sender, ZabbixAPI +from zabbix_utils.exceptions import APIRequestError from homeassistant.const import ( CONF_HOST, @@ -42,6 +43,7 @@ CONF_PUBLISH_STATES_HOST = "publish_states_host" DEFAULT_SSL = False DEFAULT_PATH = "zabbix" +DEFAULT_SENDER_PORT = 10051 TIMEOUT = 5 RETRY_DELAY = 20 @@ -86,7 +88,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: try: zapi = ZabbixAPI(url=url, user=username, password=password) _LOGGER.debug("Connected to Zabbix API Version %s", zapi.api_version()) - except ZabbixAPIException as login_exception: + except 
APIRequestError as login_exception: _LOGGER.error("Unable to login to the Zabbix API: %s", login_exception) return False except HTTPError as http_error: @@ -104,7 +106,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: def event_to_metrics( event: Event, float_keys: set[str], string_keys: set[str] - ) -> list[ZabbixMetric] | None: + ) -> list[ItemValue] | None: """Add an event to the outgoing Zabbix list.""" state = event.data.get("new_state") if state is None or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE): @@ -145,14 +147,14 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: float_keys.update(floats) if len(float_keys) != float_keys_count: floats_discovery = [{"{#KEY}": float_key} for float_key in float_keys] - metric = ZabbixMetric( + metric = ItemValue( publish_states_host, "homeassistant.floats_discovery", json.dumps(floats_discovery), ) metrics.append(metric) for key, value in floats.items(): - metric = ZabbixMetric( + metric = ItemValue( publish_states_host, f"homeassistant.float[{key}]", value ) metrics.append(metric) @@ -161,7 +163,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: return metrics if publish_states_host: - zabbix_sender = ZabbixSender(zabbix_server=conf[CONF_HOST]) + zabbix_sender = Sender(server=conf[CONF_HOST], port=DEFAULT_SENDER_PORT) instance = ZabbixThread(zabbix_sender, event_to_metrics) instance.setup(hass) @@ -175,10 +177,8 @@ class ZabbixThread(threading.Thread): def __init__( self, - zabbix_sender: ZabbixSender, - event_to_metrics: Callable[ - [Event, set[str], set[str]], list[ZabbixMetric] | None - ], + zabbix_sender: Sender, + event_to_metrics: Callable[[Event, set[str], set[str]], list[ItemValue] | None], ) -> None: """Initialize the listener.""" threading.Thread.__init__(self, name="Zabbix") @@ -208,12 +208,12 @@ class ZabbixThread(threading.Thread): item = (time.monotonic(), event) self.queue.put(item) - def get_metrics(self) -> tuple[int, list[ZabbixMetric]]: + def 
get_metrics(self) -> tuple[int, list[ItemValue]]: """Return a batch of events formatted for writing.""" queue_seconds = QUEUE_BACKLOG_SECONDS + self.MAX_TRIES * RETRY_DELAY count = 0 - metrics: list[ZabbixMetric] = [] + metrics: list[ItemValue] = [] dropped = 0 @@ -243,7 +243,7 @@ class ZabbixThread(threading.Thread): return count, metrics - def write_to_zabbix(self, metrics: list[ZabbixMetric]) -> None: + def write_to_zabbix(self, metrics: list[ItemValue]) -> None: """Write preprocessed events to zabbix, with retry.""" for retry in range(self.MAX_TRIES + 1): diff --git a/homeassistant/components/zabbix/manifest.json b/homeassistant/components/zabbix/manifest.json index 9c7171bea46..86389d2b839 100644 --- a/homeassistant/components/zabbix/manifest.json +++ b/homeassistant/components/zabbix/manifest.json @@ -1,10 +1,10 @@ { "domain": "zabbix", "name": "Zabbix", - "codeowners": [], + "codeowners": ["@kruton"], "documentation": "https://www.home-assistant.io/integrations/zabbix", "iot_class": "local_polling", - "loggers": ["pyzabbix"], + "loggers": ["zabbix_utils"], "quality_scale": "legacy", - "requirements": ["py-zabbix==1.1.7"] + "requirements": ["zabbix-utils==2.0.1"] } diff --git a/homeassistant/components/zabbix/sensor.py b/homeassistant/components/zabbix/sensor.py index f5d96f106cb..7728233ebc0 100644 --- a/homeassistant/components/zabbix/sensor.py +++ b/homeassistant/components/zabbix/sensor.py @@ -6,8 +6,8 @@ from collections.abc import Mapping import logging from typing import Any -from pyzabbix import ZabbixAPI import voluptuous as vol +from zabbix_utils import ZabbixAPI from homeassistant.components.sensor import ( PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, diff --git a/requirements_all.txt b/requirements_all.txt index a4f61fde797..dfeb83cc176 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1723,9 +1723,6 @@ py-sucks==0.9.10 # homeassistant.components.synology_dsm py-synologydsm-api==2.5.3 -# homeassistant.components.zabbix 
-py-zabbix==1.1.7 - # homeassistant.components.atome pyAtome==0.1.1 @@ -3084,6 +3081,9 @@ youtubeaio==1.1.5 # homeassistant.components.media_extractor yt-dlp[default]==2024.12.13 +# homeassistant.components.zabbix +zabbix-utils==2.0.1 + # homeassistant.components.zamg zamg==0.3.6 From 3df992790d8cca2c2e13e828bb41254fb8ee072e Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 20 Dec 2024 10:59:52 +0100 Subject: [PATCH 514/677] Bump aiohasupervisor to version 0.2.2b3 (#133631) --- homeassistant/components/hassio/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/hassio/manifest.json b/homeassistant/components/hassio/manifest.json index 70230701965..d2cf790219c 100644 --- a/homeassistant/components/hassio/manifest.json +++ b/homeassistant/components/hassio/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/hassio", "iot_class": "local_polling", "quality_scale": "internal", - "requirements": ["aiohasupervisor==0.2.2b2"], + "requirements": ["aiohasupervisor==0.2.2b3"], "single_config_entry": true } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 62de8720278..dae92035b11 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -3,7 +3,7 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 -aiohasupervisor==0.2.2b2 +aiohasupervisor==0.2.2b3 aiohttp-fast-zlib==0.2.0 aiohttp==3.11.11 aiohttp_cors==0.7.0 diff --git a/pyproject.toml b/pyproject.toml index af79a173bab..171ca69dac0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ dependencies = [ # Integrations may depend on hassio integration without listing it to # change behavior based on presence of supervisor. 
Deprecated with #127228 # Lib can be removed with 2025.11 - "aiohasupervisor==0.2.2b2", + "aiohasupervisor==0.2.2b3", "aiohttp==3.11.11", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.2.0", diff --git a/requirements.txt b/requirements.txt index a6fda6760d4..9f1615b37f2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # Home Assistant Core aiodns==3.2.0 -aiohasupervisor==0.2.2b2 +aiohasupervisor==0.2.2b3 aiohttp==3.11.11 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index dfeb83cc176..e6a28cd6f41 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -261,7 +261,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.2b2 +aiohasupervisor==0.2.2b3 # homeassistant.components.homekit_controller aiohomekit==3.2.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d374203a614..788f0faff5f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -246,7 +246,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.2b2 +aiohasupervisor==0.2.2b3 # homeassistant.components.homekit_controller aiohomekit==3.2.7 From bddd8624bbf9c2fbc54335bf69f43513d768385b Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Fri, 20 Dec 2024 12:24:15 +0100 Subject: [PATCH 515/677] Add scale support to lamarzocco (#133335) --- .../components/lamarzocco/binary_sensor.py | 47 ++++++- .../components/lamarzocco/coordinator.py | 26 +++- homeassistant/components/lamarzocco/entity.py | 24 ++++ .../components/lamarzocco/icons.json | 10 ++ homeassistant/components/lamarzocco/number.py | 58 ++++++++- .../components/lamarzocco/quality_scale.yaml | 8 +- homeassistant/components/lamarzocco/select.py | 59 ++++++++- homeassistant/components/lamarzocco/sensor.py | 47 ++++++- .../components/lamarzocco/strings.json | 10 ++ tests/components/lamarzocco/conftest.py | 5 +- 
.../lamarzocco/fixtures/config_mini.json | 116 ++++++++++++++++++ .../snapshots/test_binary_sensor.ambr | 47 +++++++ .../lamarzocco/snapshots/test_init.ambr | 32 +++++ .../lamarzocco/snapshots/test_number.ambr | 116 +++++++++++++++++- .../lamarzocco/snapshots/test_select.ambr | 55 +++++++++ .../lamarzocco/snapshots/test_sensor.ambr | 51 ++++++++ .../lamarzocco/test_binary_sensor.py | 68 ++++++++++ tests/components/lamarzocco/test_init.py | 52 +++++++- tests/components/lamarzocco/test_number.py | 93 +++++++++++++- tests/components/lamarzocco/test_select.py | 97 ++++++++++++++- tests/components/lamarzocco/test_sensor.py | 69 ++++++++++- 21 files changed, 1059 insertions(+), 31 deletions(-) create mode 100644 tests/components/lamarzocco/fixtures/config_mini.json diff --git a/homeassistant/components/lamarzocco/binary_sensor.py b/homeassistant/components/lamarzocco/binary_sensor.py index 3d11992e7c1..e36b53bc993 100644 --- a/homeassistant/components/lamarzocco/binary_sensor.py +++ b/homeassistant/components/lamarzocco/binary_sensor.py @@ -3,6 +3,7 @@ from collections.abc import Callable from dataclasses import dataclass +from pylamarzocco.const import MachineModel from pylamarzocco.models import LaMarzoccoMachineConfig from homeassistant.components.binary_sensor import ( @@ -15,7 +16,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from .coordinator import LaMarzoccoConfigEntry -from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription, LaMarzoccScaleEntity # Coordinator is used to centralize the data updates PARALLEL_UPDATES = 0 @@ -28,7 +29,7 @@ class LaMarzoccoBinarySensorEntityDescription( ): """Description of a La Marzocco binary sensor.""" - is_on_fn: Callable[[LaMarzoccoMachineConfig], bool] + is_on_fn: Callable[[LaMarzoccoMachineConfig], bool | None] ENTITIES: tuple[LaMarzoccoBinarySensorEntityDescription, ...] 
= ( @@ -57,6 +58,15 @@ ENTITIES: tuple[LaMarzoccoBinarySensorEntityDescription, ...] = ( ), ) +SCALE_ENTITIES: tuple[LaMarzoccoBinarySensorEntityDescription, ...] = ( + LaMarzoccoBinarySensorEntityDescription( + key="connected", + device_class=BinarySensorDeviceClass.CONNECTIVITY, + is_on_fn=lambda config: config.scale.connected if config.scale else None, + entity_category=EntityCategory.DIAGNOSTIC, + ), +) + async def async_setup_entry( hass: HomeAssistant, @@ -66,11 +76,30 @@ async def async_setup_entry( """Set up binary sensor entities.""" coordinator = entry.runtime_data.config_coordinator - async_add_entities( + entities = [ LaMarzoccoBinarySensorEntity(coordinator, description) for description in ENTITIES if description.supported_fn(coordinator) - ) + ] + + if ( + coordinator.device.model == MachineModel.LINEA_MINI + and coordinator.device.config.scale + ): + entities.extend( + LaMarzoccoScaleBinarySensorEntity(coordinator, description) + for description in SCALE_ENTITIES + ) + + def _async_add_new_scale() -> None: + async_add_entities( + LaMarzoccoScaleBinarySensorEntity(coordinator, description) + for description in SCALE_ENTITIES + ) + + coordinator.new_device_callback.append(_async_add_new_scale) + + async_add_entities(entities) class LaMarzoccoBinarySensorEntity(LaMarzoccoEntity, BinarySensorEntity): @@ -79,6 +108,14 @@ class LaMarzoccoBinarySensorEntity(LaMarzoccoEntity, BinarySensorEntity): entity_description: LaMarzoccoBinarySensorEntityDescription @property - def is_on(self) -> bool: + def is_on(self) -> bool | None: """Return true if the binary sensor is on.""" return self.entity_description.is_on_fn(self.coordinator.device.config) + + +class LaMarzoccoScaleBinarySensorEntity( + LaMarzoccoBinarySensorEntity, LaMarzoccScaleEntity +): + """Binary sensor for La Marzocco scales.""" + + entity_description: LaMarzoccoBinarySensorEntityDescription diff --git a/homeassistant/components/lamarzocco/coordinator.py 
b/homeassistant/components/lamarzocco/coordinator.py index aca84fc4660..0b07409adb5 100644 --- a/homeassistant/components/lamarzocco/coordinator.py +++ b/homeassistant/components/lamarzocco/coordinator.py @@ -3,6 +3,7 @@ from __future__ import annotations from abc import abstractmethod +from collections.abc import Callable from dataclasses import dataclass from datetime import timedelta import logging @@ -14,8 +15,9 @@ from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful from homeassistant.config_entries import ConfigEntry from homeassistant.const import EVENT_HOMEASSISTANT_STOP -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed +import homeassistant.helpers.device_registry as dr from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN @@ -62,6 +64,7 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): self.device = device self.local_connection_configured = local_client is not None self._local_client = local_client + self.new_device_callback: list[Callable] = [] async def _async_update_data(self) -> None: """Do the data update.""" @@ -86,6 +89,8 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator): """Class to handle fetching data from the La Marzocco API centrally.""" + _scale_address: str | None = None + async def _async_setup(self) -> None: """Set up the coordinator.""" if self._local_client is not None: @@ -118,6 +123,25 @@ class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator): """Fetch data from API endpoint.""" await self.device.get_config() _LOGGER.debug("Current status: %s", str(self.device.config)) + self._async_add_remove_scale() + + @callback + def _async_add_remove_scale(self) -> None: + """Add or remove a scale when added or removed.""" + if 
self.device.config.scale and not self._scale_address: + self._scale_address = self.device.config.scale.address + for scale_callback in self.new_device_callback: + scale_callback() + elif not self.device.config.scale and self._scale_address: + device_registry = dr.async_get(self.hass) + if device := device_registry.async_get_device( + identifiers={(DOMAIN, self._scale_address)} + ): + device_registry.async_update_device( + device_id=device.id, + remove_config_entry_id=self.config_entry.entry_id, + ) + self._scale_address = None class LaMarzoccoFirmwareUpdateCoordinator(LaMarzoccoUpdateCoordinator): diff --git a/homeassistant/components/lamarzocco/entity.py b/homeassistant/components/lamarzocco/entity.py index c3385eebd52..3e70ff1acdf 100644 --- a/homeassistant/components/lamarzocco/entity.py +++ b/homeassistant/components/lamarzocco/entity.py @@ -2,6 +2,7 @@ from collections.abc import Callable from dataclasses import dataclass +from typing import TYPE_CHECKING from pylamarzocco.const import FirmwareType from pylamarzocco.devices.machine import LaMarzoccoMachine @@ -85,3 +86,26 @@ class LaMarzoccoEntity(LaMarzoccoBaseEntity): """Initialize the entity.""" super().__init__(coordinator, entity_description.key) self.entity_description = entity_description + + +class LaMarzoccScaleEntity(LaMarzoccoEntity): + """Common class for scale.""" + + def __init__( + self, + coordinator: LaMarzoccoUpdateCoordinator, + entity_description: LaMarzoccoEntityDescription, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator, entity_description) + scale = coordinator.device.config.scale + if TYPE_CHECKING: + assert scale + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, scale.address)}, + name=scale.name, + manufacturer="Acaia", + model="Lunar", + model_id="Y.301", + via_device=(DOMAIN, coordinator.device.serial_number), + ) diff --git a/homeassistant/components/lamarzocco/icons.json b/homeassistant/components/lamarzocco/icons.json index 
860da12ddd9..79267b4abd4 100644 --- a/homeassistant/components/lamarzocco/icons.json +++ b/homeassistant/components/lamarzocco/icons.json @@ -43,6 +43,9 @@ "preinfusion_off": { "default": "mdi:water" }, + "scale_target": { + "default": "mdi:scale-balance" + }, "smart_standby_time": { "default": "mdi:timer" }, @@ -54,6 +57,13 @@ } }, "select": { + "active_bbw": { + "default": "mdi:alpha-u", + "state": { + "a": "mdi:alpha-a", + "b": "mdi:alpha-b" + } + }, "smart_standby_mode": { "default": "mdi:power", "state": { diff --git a/homeassistant/components/lamarzocco/number.py b/homeassistant/components/lamarzocco/number.py index a1389769194..44b582fbf1a 100644 --- a/homeassistant/components/lamarzocco/number.py +++ b/homeassistant/components/lamarzocco/number.py @@ -33,7 +33,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator -from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription, LaMarzoccScaleEntity PARALLEL_UPDATES = 1 @@ -56,7 +56,9 @@ class LaMarzoccoKeyNumberEntityDescription( ): """Description of an La Marzocco number entity with keys.""" - native_value_fn: Callable[[LaMarzoccoMachineConfig, PhysicalKey], float | int] + native_value_fn: Callable[ + [LaMarzoccoMachineConfig, PhysicalKey], float | int | None + ] set_value_fn: Callable[ [LaMarzoccoMachine, float | int, PhysicalKey], Coroutine[Any, Any, bool] ] @@ -203,6 +205,27 @@ KEY_ENTITIES: tuple[LaMarzoccoKeyNumberEntityDescription, ...] = ( ), ) +SCALE_KEY_ENTITIES: tuple[LaMarzoccoKeyNumberEntityDescription, ...] 
= ( + LaMarzoccoKeyNumberEntityDescription( + key="scale_target", + translation_key="scale_target", + native_step=PRECISION_WHOLE, + native_min_value=1, + native_max_value=100, + entity_category=EntityCategory.CONFIG, + set_value_fn=lambda machine, weight, key: machine.set_bbw_recipe_target( + key, int(weight) + ), + native_value_fn=lambda config, key: ( + config.bbw_settings.doses[key] if config.bbw_settings else None + ), + supported_fn=( + lambda coordinator: coordinator.device.model == MachineModel.LINEA_MINI + and coordinator.device.config.scale is not None + ), + ), +) + async def async_setup_entry( hass: HomeAssistant, @@ -224,6 +247,27 @@ async def async_setup_entry( LaMarzoccoKeyNumberEntity(coordinator, description, key) for key in range(min(num_keys, 1), num_keys + 1) ) + + for description in SCALE_KEY_ENTITIES: + if description.supported_fn(coordinator): + if bbw_settings := coordinator.device.config.bbw_settings: + entities.extend( + LaMarzoccoScaleTargetNumberEntity( + coordinator, description, int(key) + ) + for key in bbw_settings.doses + ) + + def _async_add_new_scale() -> None: + if bbw_settings := coordinator.device.config.bbw_settings: + async_add_entities( + LaMarzoccoScaleTargetNumberEntity(coordinator, description, int(key)) + for description in SCALE_KEY_ENTITIES + for key in bbw_settings.doses + ) + + coordinator.new_device_callback.append(_async_add_new_scale) + async_add_entities(entities) @@ -281,7 +325,7 @@ class LaMarzoccoKeyNumberEntity(LaMarzoccoEntity, NumberEntity): self.pyhsical_key = pyhsical_key @property - def native_value(self) -> float: + def native_value(self) -> float | None: """Return the current value.""" return self.entity_description.native_value_fn( self.coordinator.device.config, PhysicalKey(self.pyhsical_key) @@ -305,3 +349,11 @@ class LaMarzoccoKeyNumberEntity(LaMarzoccoEntity, NumberEntity): }, ) from exc self.async_write_ha_state() + + +class LaMarzoccoScaleTargetNumberEntity( + LaMarzoccoKeyNumberEntity, 
LaMarzoccScaleEntity +): + """Entity representing a key number on the scale.""" + + entity_description: LaMarzoccoKeyNumberEntityDescription diff --git a/homeassistant/components/lamarzocco/quality_scale.yaml b/homeassistant/components/lamarzocco/quality_scale.yaml index 3677bd8d6b8..b03f661c7b7 100644 --- a/homeassistant/components/lamarzocco/quality_scale.yaml +++ b/homeassistant/components/lamarzocco/quality_scale.yaml @@ -62,9 +62,9 @@ rules: docs-troubleshooting: done docs-use-cases: done dynamic-devices: - status: exempt + status: done comment: | - Device type integration. + Device type integration, only possible for addon scale entity-category: done entity-device-class: done entity-disabled-by-default: done @@ -74,9 +74,9 @@ rules: reconfiguration-flow: done repair-issues: done stale-devices: - status: exempt + status: done comment: | - Device type integration. + Device type integration, only possible for addon scale # Platinum async-dependency: done diff --git a/homeassistant/components/lamarzocco/select.py b/homeassistant/components/lamarzocco/select.py index 595c157b823..7acb654f0d2 100644 --- a/homeassistant/components/lamarzocco/select.py +++ b/homeassistant/components/lamarzocco/select.py @@ -4,7 +4,13 @@ from collections.abc import Callable, Coroutine from dataclasses import dataclass from typing import Any -from pylamarzocco.const import MachineModel, PrebrewMode, SmartStandbyMode, SteamLevel +from pylamarzocco.const import ( + MachineModel, + PhysicalKey, + PrebrewMode, + SmartStandbyMode, + SteamLevel, +) from pylamarzocco.devices.machine import LaMarzoccoMachine from pylamarzocco.exceptions import RequestNotSuccessful from pylamarzocco.models import LaMarzoccoMachineConfig @@ -17,7 +23,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN from .coordinator import LaMarzoccoConfigEntry -from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +from .entity import LaMarzoccoEntity, 
LaMarzoccoEntityDescription, LaMarzoccScaleEntity PARALLEL_UPDATES = 1 @@ -52,7 +58,7 @@ class LaMarzoccoSelectEntityDescription( ): """Description of a La Marzocco select entity.""" - current_option_fn: Callable[[LaMarzoccoMachineConfig], str] + current_option_fn: Callable[[LaMarzoccoMachineConfig], str | None] select_option_fn: Callable[[LaMarzoccoMachine, str], Coroutine[Any, Any, bool]] @@ -100,6 +106,22 @@ ENTITIES: tuple[LaMarzoccoSelectEntityDescription, ...] = ( ), ) +SCALE_ENTITIES: tuple[LaMarzoccoSelectEntityDescription, ...] = ( + LaMarzoccoSelectEntityDescription( + key="active_bbw", + translation_key="active_bbw", + options=["a", "b"], + select_option_fn=lambda machine, option: machine.set_active_bbw_recipe( + PhysicalKey[option.upper()] + ), + current_option_fn=lambda config: ( + config.bbw_settings.active_dose.name.lower() + if config.bbw_settings + else None + ), + ), +) + async def async_setup_entry( hass: HomeAssistant, @@ -109,11 +131,30 @@ async def async_setup_entry( """Set up select entities.""" coordinator = entry.runtime_data.config_coordinator - async_add_entities( + entities = [ LaMarzoccoSelectEntity(coordinator, description) for description in ENTITIES if description.supported_fn(coordinator) - ) + ] + + if ( + coordinator.device.model == MachineModel.LINEA_MINI + and coordinator.device.config.scale + ): + entities.extend( + LaMarzoccoScaleSelectEntity(coordinator, description) + for description in SCALE_ENTITIES + ) + + def _async_add_new_scale() -> None: + async_add_entities( + LaMarzoccoScaleSelectEntity(coordinator, description) + for description in SCALE_ENTITIES + ) + + coordinator.new_device_callback.append(_async_add_new_scale) + + async_add_entities(entities) class LaMarzoccoSelectEntity(LaMarzoccoEntity, SelectEntity): @@ -122,7 +163,7 @@ class LaMarzoccoSelectEntity(LaMarzoccoEntity, SelectEntity): entity_description: LaMarzoccoSelectEntityDescription @property - def current_option(self) -> str: + def current_option(self) -> 
str | None: """Return the current selected option.""" return str( self.entity_description.current_option_fn(self.coordinator.device.config) @@ -145,3 +186,9 @@ class LaMarzoccoSelectEntity(LaMarzoccoEntity, SelectEntity): }, ) from exc self.async_write_ha_state() + + +class LaMarzoccoScaleSelectEntity(LaMarzoccoSelectEntity, LaMarzoccScaleEntity): + """Select entity for La Marzocco scales.""" + + entity_description: LaMarzoccoSelectEntityDescription diff --git a/homeassistant/components/lamarzocco/sensor.py b/homeassistant/components/lamarzocco/sensor.py index 8d57c1b8403..2acca879d52 100644 --- a/homeassistant/components/lamarzocco/sensor.py +++ b/homeassistant/components/lamarzocco/sensor.py @@ -12,12 +12,17 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.const import EntityCategory, UnitOfTemperature, UnitOfTime +from homeassistant.const import ( + PERCENTAGE, + EntityCategory, + UnitOfTemperature, + UnitOfTime, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from .coordinator import LaMarzoccoConfigEntry -from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription, LaMarzoccScaleEntity # Coordinator is used to centralize the data updates PARALLEL_UPDATES = 0 @@ -91,6 +96,21 @@ STATISTIC_ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = ( ), ) +SCALE_ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] 
= ( + LaMarzoccoSensorEntityDescription( + key="scale_battery", + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.BATTERY, + value_fn=lambda device: ( + device.config.scale.battery if device.config.scale else 0 + ), + supported_fn=( + lambda coordinator: coordinator.device.model == MachineModel.LINEA_MINI + ), + ), +) + async def async_setup_entry( hass: HomeAssistant, @@ -106,6 +126,15 @@ async def async_setup_entry( if description.supported_fn(config_coordinator) ] + if ( + config_coordinator.device.model == MachineModel.LINEA_MINI + and config_coordinator.device.config.scale + ): + entities.extend( + LaMarzoccoScaleSensorEntity(config_coordinator, description) + for description in SCALE_ENTITIES + ) + statistics_coordinator = entry.runtime_data.statistics_coordinator entities.extend( LaMarzoccoSensorEntity(statistics_coordinator, description) @@ -113,6 +142,14 @@ async def async_setup_entry( if description.supported_fn(statistics_coordinator) ) + def _async_add_new_scale() -> None: + async_add_entities( + LaMarzoccoScaleSensorEntity(config_coordinator, description) + for description in SCALE_ENTITIES + ) + + config_coordinator.new_device_callback.append(_async_add_new_scale) + async_add_entities(entities) @@ -125,3 +162,9 @@ class LaMarzoccoSensorEntity(LaMarzoccoEntity, SensorEntity): def native_value(self) -> int | float: """State of the sensor.""" return self.entity_description.value_fn(self.coordinator.device) + + +class LaMarzoccoScaleSensorEntity(LaMarzoccoSensorEntity, LaMarzoccScaleEntity): + """Sensor for a La Marzocco scale.""" + + entity_description: LaMarzoccoSensorEntityDescription diff --git a/homeassistant/components/lamarzocco/strings.json b/homeassistant/components/lamarzocco/strings.json index 666eb7f4a84..cc96e4615dc 100644 --- a/homeassistant/components/lamarzocco/strings.json +++ b/homeassistant/components/lamarzocco/strings.json @@ -122,6 +122,9 @@ "preinfusion_off_key": { 
"name": "Preinfusion time Key {key}" }, + "scale_target_key": { + "name": "Brew by weight target {key}" + }, "smart_standby_time": { "name": "Smart standby time" }, @@ -133,6 +136,13 @@ } }, "select": { + "active_bbw": { + "name": "Active brew by weight recipe", + "state": { + "a": "Recipe A", + "b": "Recipe B" + } + }, "prebrew_infusion_select": { "name": "Prebrew/-infusion mode", "state": { diff --git a/tests/components/lamarzocco/conftest.py b/tests/components/lamarzocco/conftest.py index 997fa73604c..658e0dd96bc 100644 --- a/tests/components/lamarzocco/conftest.py +++ b/tests/components/lamarzocco/conftest.py @@ -135,7 +135,10 @@ def mock_lamarzocco(device_fixture: MachineModel) -> Generator[MagicMock]: serial_number=serial_number, name=serial_number, ) - config = load_json_object_fixture("config.json", DOMAIN) + if device_fixture == MachineModel.LINEA_MINI: + config = load_json_object_fixture("config_mini.json", DOMAIN) + else: + config = load_json_object_fixture("config.json", DOMAIN) statistics = json.loads(load_fixture("statistics.json", DOMAIN)) dummy_machine.parse_config(config) diff --git a/tests/components/lamarzocco/fixtures/config_mini.json b/tests/components/lamarzocco/fixtures/config_mini.json new file mode 100644 index 00000000000..22533a94872 --- /dev/null +++ b/tests/components/lamarzocco/fixtures/config_mini.json @@ -0,0 +1,116 @@ +{ + "version": "v1", + "preinfusionModesAvailable": ["ByDoseType"], + "machineCapabilities": [ + { + "family": "LINEA", + "groupsNumber": 1, + "coffeeBoilersNumber": 1, + "hasCupWarmer": false, + "steamBoilersNumber": 1, + "teaDosesNumber": 1, + "machineModes": ["BrewingMode", "StandBy"], + "schedulingType": "smartWakeUpSleep" + } + ], + "machine_sn": "Sn01239157", + "machine_hw": "0", + "isPlumbedIn": false, + "isBackFlushEnabled": false, + "standByTime": 0, + "tankStatus": true, + "settings": [], + "recipes": [ + { + "id": "Recipe1", + "dose_mode": "Mass", + "recipe_doses": [ + { "id": "A", "target": 32 }, + { "id": 
"B", "target": 45 } + ] + } + ], + "recipeAssignment": [ + { + "dose_index": "DoseA", + "recipe_id": "Recipe1", + "recipe_dose": "A", + "group": "Group1" + } + ], + "groupCapabilities": [ + { + "capabilities": { + "groupType": "AV_Group", + "groupNumber": "Group1", + "boilerId": "CoffeeBoiler1", + "hasScale": false, + "hasFlowmeter": false, + "numberOfDoses": 1 + }, + "doses": [ + { + "groupNumber": "Group1", + "doseIndex": "DoseA", + "doseType": "MassType", + "stopTarget": 32 + } + ], + "doseMode": { "groupNumber": "Group1", "brewingType": "ManualType" } + } + ], + "machineMode": "StandBy", + "teaDoses": { "DoseA": { "doseIndex": "DoseA", "stopTarget": 0 } }, + "scale": { + "connected": true, + "address": "44:b7:d0:74:5f:90", + "name": "LMZ-123A45", + "battery": 64 + }, + "boilers": [ + { "id": "SteamBoiler", "isEnabled": false, "target": 0, "current": 0 }, + { "id": "CoffeeBoiler1", "isEnabled": true, "target": 89, "current": 42 } + ], + "boilerTargetTemperature": { "SteamBoiler": 0, "CoffeeBoiler1": 89 }, + "preinfusionMode": { + "Group1": { + "groupNumber": "Group1", + "preinfusionStyle": "PreinfusionByDoseType" + } + }, + "preinfusionSettings": { + "mode": "TypeB", + "Group1": [ + { + "groupNumber": "Group1", + "doseType": "DoseA", + "preWetTime": 2, + "preWetHoldTime": 3 + } + ] + }, + "wakeUpSleepEntries": [ + { + "id": "T6aLl42", + "days": [ + "monday", + "tuesday", + "wednesday", + "thursday", + "friday", + "saturday", + "sunday" + ], + "steam": false, + "enabled": false, + "timeOn": "24:0", + "timeOff": "24:0" + } + ], + "smartStandBy": { "mode": "LastBrewing", "minutes": 10, "enabled": true }, + "clock": "2024-08-31T14:47:45", + "firmwareVersions": [ + { "name": "machine_firmware", "fw_version": "2.12" }, + { "name": "gateway_firmware", "fw_version": "v3.6-rc4" } + ] +} diff --git a/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr b/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr index cda285a7106..5308ae22184 100644 --- 
a/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr +++ b/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr @@ -140,3 +140,50 @@ 'unit_of_measurement': None, }) # --- +# name: test_scale_connectivity[Linea Mini] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'LMZ-123A45 Connectivity', + }), + 'context': , + 'entity_id': 'binary_sensor.lmz_123a45_connectivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_scale_connectivity[Linea Mini].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.lmz_123a45_connectivity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Connectivity', + 'platform': 'lamarzocco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'LM012345_connected', + 'unit_of_measurement': None, + }) +# --- diff --git a/tests/components/lamarzocco/snapshots/test_init.ambr b/tests/components/lamarzocco/snapshots/test_init.ambr index 519a9301bfd..67aa0b8bea8 100644 --- a/tests/components/lamarzocco/snapshots/test_init.ambr +++ b/tests/components/lamarzocco/snapshots/test_init.ambr @@ -39,3 +39,35 @@ 'via_device_id': None, }) # --- +# name: test_scale_device[Linea Mini] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'lamarzocco', + '44:b7:d0:74:5f:90', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 
'manufacturer': 'Acaia', + 'model': 'Lunar', + 'model_id': 'Y.301', + 'name': 'LMZ-123A45', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': , + }) +# --- diff --git a/tests/components/lamarzocco/snapshots/test_number.ambr b/tests/components/lamarzocco/snapshots/test_number.ambr index b7e42bb425f..49e4713aab1 100644 --- a/tests/components/lamarzocco/snapshots/test_number.ambr +++ b/tests/components/lamarzocco/snapshots/test_number.ambr @@ -657,7 +657,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1', + 'state': '3', }) # --- # name: test_pre_brew_infusion_numbers[prebrew_off_time-set_prebrew_time-Enabled-6-kwargs0-Linea Mini].1 @@ -771,7 +771,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1', + 'state': '3', }) # --- # name: test_pre_brew_infusion_numbers[prebrew_on_time-set_prebrew_time-Enabled-6-kwargs1-Linea Mini].1 @@ -885,7 +885,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1', + 'state': '3', }) # --- # name: test_pre_brew_infusion_numbers[preinfusion_time-set_preinfusion_time-TypeB-7-kwargs2-Linea Mini].1 @@ -983,3 +983,113 @@ 'unit_of_measurement': , }) # --- +# name: test_set_target[Linea Mini-1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'LMZ-123A45 Brew by weight target 1', + 'max': 100, + 'min': 1, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.lmz_123a45_brew_by_weight_target_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '32', + }) +# --- +# name: test_set_target[Linea Mini-1].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100, + 'min': 1, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 
'number.lmz_123a45_brew_by_weight_target_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Brew by weight target 1', + 'platform': 'lamarzocco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'scale_target_key', + 'unique_id': 'LM012345_scale_target_key1', + 'unit_of_measurement': None, + }) +# --- +# name: test_set_target[Linea Mini-2] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'LMZ-123A45 Brew by weight target 2', + 'max': 100, + 'min': 1, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.lmz_123a45_brew_by_weight_target_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '45', + }) +# --- +# name: test_set_target[Linea Mini-2].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100, + 'min': 1, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.lmz_123a45_brew_by_weight_target_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Brew by weight target 2', + 'platform': 'lamarzocco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'scale_target_key', + 'unique_id': 'LM012345_scale_target_key2', + 'unit_of_measurement': None, + }) +# --- diff --git a/tests/components/lamarzocco/snapshots/test_select.ambr b/tests/components/lamarzocco/snapshots/test_select.ambr index 46fa55eff13..325409a0b7f 100644 --- a/tests/components/lamarzocco/snapshots/test_select.ambr +++ 
b/tests/components/lamarzocco/snapshots/test_select.ambr @@ -1,4 +1,59 @@ # serializer version: 1 +# name: test_active_bbw_recipe[Linea Mini] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'LMZ-123A45 Active brew by weight recipe', + 'options': list([ + 'a', + 'b', + ]), + }), + 'context': , + 'entity_id': 'select.lmz_123a45_active_brew_by_weight_recipe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'a', + }) +# --- +# name: test_active_bbw_recipe[Linea Mini].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'a', + 'b', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.lmz_123a45_active_brew_by_weight_recipe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Active brew by weight recipe', + 'platform': 'lamarzocco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_bbw', + 'unique_id': 'LM012345_active_bbw', + 'unit_of_measurement': None, + }) +# --- # name: test_pre_brew_infusion_select[GS3 AV] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/lamarzocco/snapshots/test_sensor.ambr b/tests/components/lamarzocco/snapshots/test_sensor.ambr index da1efbf1eaa..6afdffab821 100644 --- a/tests/components/lamarzocco/snapshots/test_sensor.ambr +++ b/tests/components/lamarzocco/snapshots/test_sensor.ambr @@ -1,4 +1,55 @@ # serializer version: 1 +# name: test_scale_battery[Linea Mini] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'LMZ-123A45 Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 
'sensor.lmz_123a45_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '64', + }) +# --- +# name: test_scale_battery[Linea Mini].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.lmz_123a45_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'lamarzocco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'LM012345_scale_battery', + 'unit_of_measurement': '%', + }) +# --- # name: test_sensors[GS012345_current_coffee_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/lamarzocco/test_binary_sensor.py b/tests/components/lamarzocco/test_binary_sensor.py index 956bfe90dd4..cba806d887c 100644 --- a/tests/components/lamarzocco/test_binary_sensor.py +++ b/tests/components/lamarzocco/test_binary_sensor.py @@ -4,7 +4,10 @@ from datetime import timedelta from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory +from pylamarzocco.const import MachineModel from pylamarzocco.exceptions import RequestNotSuccessful +from pylamarzocco.models import LaMarzoccoScale +import pytest from syrupy import SnapshotAssertion from homeassistant.const import STATE_UNAVAILABLE @@ -98,3 +101,68 @@ async def test_sensor_going_unavailable( state = hass.states.get(brewing_active_sensor) assert state assert state.state == STATE_UNAVAILABLE + + +@pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MINI]) +async def test_scale_connectivity( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: 
MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the scale binary sensors.""" + await async_init_integration(hass, mock_config_entry) + + state = hass.states.get("binary_sensor.lmz_123a45_connectivity") + assert state + assert state == snapshot + + entry = entity_registry.async_get(state.entity_id) + assert entry + assert entry.device_id + assert entry == snapshot + + +@pytest.mark.parametrize( + "device_fixture", + [MachineModel.GS3_AV, MachineModel.GS3_MP, MachineModel.LINEA_MICRA], +) +async def test_other_models_no_scale_connectivity( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Ensure the other models don't have a connectivity sensor.""" + await async_init_integration(hass, mock_config_entry) + + state = hass.states.get("binary_sensor.lmz_123a45_connectivity") + assert state is None + + +@pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MINI]) +async def test_connectivity_on_new_scale_added( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Ensure the connectivity binary sensor for a new scale is added automatically.""" + + mock_lamarzocco.config.scale = None + await async_init_integration(hass, mock_config_entry) + + state = hass.states.get("binary_sensor.scale_123a45_connectivity") + assert state is None + + mock_lamarzocco.config.scale = LaMarzoccoScale( + connected=True, name="Scale-123A45", address="aa:bb:cc:dd:ee:ff", battery=50 + ) + + freezer.tick(timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("binary_sensor.scale_123a45_connectivity") + assert state diff --git a/tests/components/lamarzocco/test_init.py b/tests/components/lamarzocco/test_init.py index 446c8780b62..7d90c049a3b 100644 --- 
a/tests/components/lamarzocco/test_init.py +++ b/tests/components/lamarzocco/test_init.py @@ -1,8 +1,10 @@ """Test initialization of lamarzocco.""" +from datetime import timedelta from unittest.mock import AsyncMock, MagicMock, patch -from pylamarzocco.const import FirmwareType +from freezegun.api import FrozenDateTimeFactory +from pylamarzocco.const import FirmwareType, MachineModel from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful import pytest from syrupy import SnapshotAssertion @@ -27,7 +29,7 @@ from homeassistant.helpers import ( from . import USER_INPUT, async_init_integration, get_bluetooth_service_info -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed async def test_load_unload_config_entry( @@ -251,3 +253,49 @@ async def test_device( device = device_registry.async_get(entry.device_id) assert device assert device == snapshot + + +@pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MINI]) +async def test_scale_device( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the device.""" + + await async_init_integration(hass, mock_config_entry) + + device = device_registry.async_get_device( + identifiers={(DOMAIN, mock_lamarzocco.config.scale.address)} + ) + assert device + assert device == snapshot + + +@pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MINI]) +async def test_remove_stale_scale( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Ensure stale scale is cleaned up.""" + + await async_init_integration(hass, mock_config_entry) + + scale_address = mock_lamarzocco.config.scale.address + + device = device_registry.async_get_device(identifiers={(DOMAIN, scale_address)}) + assert device + + 
mock_lamarzocco.config.scale = None + + freezer.tick(timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + device = device_registry.async_get_device(identifiers={(DOMAIN, scale_address)}) + assert device is None diff --git a/tests/components/lamarzocco/test_number.py b/tests/components/lamarzocco/test_number.py index 710a0220e06..65c5e264f22 100644 --- a/tests/components/lamarzocco/test_number.py +++ b/tests/components/lamarzocco/test_number.py @@ -1,8 +1,10 @@ """Tests for the La Marzocco number entities.""" +from datetime import timedelta from typing import Any from unittest.mock import MagicMock +from freezegun.api import FrozenDateTimeFactory from pylamarzocco.const import ( KEYS_PER_MODEL, BoilerType, @@ -11,6 +13,7 @@ from pylamarzocco.const import ( PrebrewMode, ) from pylamarzocco.exceptions import RequestNotSuccessful +from pylamarzocco.models import LaMarzoccoScale import pytest from syrupy import SnapshotAssertion @@ -26,7 +29,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from . 
import async_init_integration -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed @pytest.mark.parametrize( @@ -444,3 +447,91 @@ async def test_number_error( blocking=True, ) assert exc_info.value.translation_key == "number_exception_key" + + +@pytest.mark.parametrize("physical_key", [PhysicalKey.A, PhysicalKey.B]) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MINI]) +async def test_set_target( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + physical_key: PhysicalKey, +) -> None: + """Test the La Marzocco set target sensors.""" + + await async_init_integration(hass, mock_config_entry) + + entity_name = f"number.lmz_123a45_brew_by_weight_target_{int(physical_key)}" + + state = hass.states.get(entity_name) + + assert state + assert state == snapshot + + entry = entity_registry.async_get(state.entity_id) + assert entry + assert entry == snapshot + + # service call + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_name, + ATTR_VALUE: 42, + }, + blocking=True, + ) + + mock_lamarzocco.set_bbw_recipe_target.assert_called_once_with(physical_key, 42) + + +@pytest.mark.parametrize( + "device_fixture", + [MachineModel.GS3_AV, MachineModel.GS3_MP, MachineModel.LINEA_MICRA], +) +async def test_other_models_no_scale_set_target( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Ensure the other models don't have a set target numbers.""" + await async_init_integration(hass, mock_config_entry) + + for i in range(1, 3): + state = hass.states.get(f"number.lmz_123a45_brew_by_weight_target_{i}") + assert state is None + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") 
+@pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MINI]) +async def test_set_target_on_new_scale_added( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Ensure the set target numbers for a new scale are added automatically.""" + + mock_lamarzocco.config.scale = None + await async_init_integration(hass, mock_config_entry) + + for i in range(1, 3): + state = hass.states.get(f"number.scale_123a45_brew_by_weight_target_{i}") + assert state is None + + mock_lamarzocco.config.scale = LaMarzoccoScale( + connected=True, name="Scale-123A45", address="aa:bb:cc:dd:ee:ff", battery=50 + ) + + freezer.tick(timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + for i in range(1, 3): + state = hass.states.get(f"number.scale_123a45_brew_by_weight_target_{i}") + assert state diff --git a/tests/components/lamarzocco/test_select.py b/tests/components/lamarzocco/test_select.py index 24b96f84f37..614bffac172 100644 --- a/tests/components/lamarzocco/test_select.py +++ b/tests/components/lamarzocco/test_select.py @@ -1,9 +1,18 @@ """Tests for the La Marzocco select entities.""" +from datetime import timedelta from unittest.mock import MagicMock -from pylamarzocco.const import MachineModel, PrebrewMode, SmartStandbyMode, SteamLevel +from freezegun.api import FrozenDateTimeFactory +from pylamarzocco.const import ( + MachineModel, + PhysicalKey, + PrebrewMode, + SmartStandbyMode, + SteamLevel, +) from pylamarzocco.exceptions import RequestNotSuccessful +from pylamarzocco.models import LaMarzoccoScale import pytest from syrupy import SnapshotAssertion @@ -17,9 +26,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -pytestmark = pytest.mark.usefixtures("init_integration") +from . 
import async_init_integration + +from tests.common import MockConfigEntry, async_fire_time_changed + + +@pytest.mark.usefixtures("init_integration") @pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MICRA]) async def test_steam_boiler_level( hass: HomeAssistant, @@ -54,6 +66,9 @@ async def test_steam_boiler_level( mock_lamarzocco.set_steam_level.assert_called_once_with(level=SteamLevel.LEVEL_2) +@pytest.mark.usefixtures("init_integration") + + + @pytest.mark.parametrize( "device_fixture", [MachineModel.GS3_AV, MachineModel.GS3_MP, MachineModel.LINEA_MINI], @@ -69,6 +84,7 @@ async def test_steam_boiler_level_none( assert state is None +@pytest.mark.usefixtures("init_integration") @pytest.mark.parametrize( "device_fixture", [MachineModel.LINEA_MICRA, MachineModel.GS3_AV, MachineModel.LINEA_MINI], @@ -106,6 +122,7 @@ async def test_pre_brew_infusion_select( mock_lamarzocco.set_prebrew_mode.assert_called_once_with(mode=PrebrewMode.PREBREW) +@pytest.mark.usefixtures("init_integration") @pytest.mark.parametrize( "device_fixture", [MachineModel.GS3_MP], @@ -121,6 +138,7 @@ async def test_pre_brew_infusion_select_none( assert state is None +@pytest.mark.usefixtures("init_integration") async def test_smart_standby_mode( hass: HomeAssistant, entity_registry: er.EntityRegistry, @@ -155,6 +173,7 @@ async def test_smart_standby_mode( ) +@pytest.mark.usefixtures("init_integration") async def test_select_errors( hass: HomeAssistant, mock_lamarzocco: MagicMock, @@ -179,3 +198,77 @@ async def test_select_errors( blocking=True, ) + assert exc_info.value.translation_key == "select_option_error" + + +@pytest.mark.usefixtures("init_integration") +@pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MINI]) +async def test_active_bbw_recipe( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_lamarzocco: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test the La Marzocco active bbw recipe select.""" + + state =
hass.states.get("select.lmz_123a45_active_brew_by_weight_recipe") + + assert state + assert state == snapshot + + entry = entity_registry.async_get(state.entity_id) + assert entry + assert entry == snapshot + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.lmz_123a45_active_brew_by_weight_recipe", + ATTR_OPTION: "b", + }, + blocking=True, + ) + + mock_lamarzocco.set_active_bbw_recipe.assert_called_once_with(PhysicalKey.B) + + +@pytest.mark.usefixtures("init_integration") +@pytest.mark.parametrize( + "device_fixture", + [MachineModel.GS3_AV, MachineModel.GS3_MP, MachineModel.LINEA_MICRA], +) +async def test_other_models_no_active_bbw_select( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, +) -> None: + """Ensure the other models don't have an active brew by weight recipe select.""" + + state = hass.states.get("select.lmz_123a45_active_brew_by_weight_recipe") + assert state is None + + +@pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MINI]) +async def test_active_bbw_select_on_new_scale_added( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Ensure the active bbw select for a new scale is added automatically.""" + + mock_lamarzocco.config.scale = None + await async_init_integration(hass, mock_config_entry) + + state = hass.states.get("select.scale_123a45_active_brew_by_weight_recipe") + assert state is None + + mock_lamarzocco.config.scale = LaMarzoccoScale( + connected=True, name="Scale-123A45", address="aa:bb:cc:dd:ee:ff", battery=50 + ) + + freezer.tick(timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("select.scale_123a45_active_brew_by_weight_recipe") + assert state diff --git a/tests/components/lamarzocco/test_sensor.py b/tests/components/lamarzocco/test_sensor.py index 6f14d52d1fc..e0426e132c3 100644 ---
a/tests/components/lamarzocco/test_sensor.py +++ b/tests/components/lamarzocco/test_sensor.py @@ -1,8 +1,11 @@ """Tests for La Marzocco sensors.""" +from datetime import timedelta from unittest.mock import MagicMock +from freezegun.api import FrozenDateTimeFactory from pylamarzocco.const import MachineModel +from pylamarzocco.models import LaMarzoccoScale import pytest from syrupy import SnapshotAssertion @@ -12,7 +15,7 @@ from homeassistant.helpers import entity_registry as er from . import async_init_integration -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed SENSORS = ( "total_coffees_made", @@ -85,3 +88,67 @@ async def test_no_steam_linea_mini( serial_number = mock_lamarzocco.serial_number state = hass.states.get(f"sensor.{serial_number}_current_temp_steam") assert state is None + + +@pytest.mark.parametrize("device_fixture", [MachineModel.LINEA_MINI]) +async def test_scale_battery( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the scale battery sensor.""" + await async_init_integration(hass, mock_config_entry) + + state = hass.states.get("sensor.lmz_123a45_battery") + assert state == snapshot + + entry = entity_registry.async_get(state.entity_id) + assert entry + assert entry.device_id + assert entry == snapshot + + +@pytest.mark.parametrize( + "device_fixture", + [MachineModel.GS3_AV, MachineModel.GS3_MP, MachineModel.LINEA_MICRA], +) +async def test_other_models_no_scale_battery( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Ensure the other models don't have a battery sensor.""" + await async_init_integration(hass, mock_config_entry) + + state = hass.states.get("sensor.lmz_123a45_battery") + assert state is None + + +@pytest.mark.parametrize("device_fixture", 
[MachineModel.LINEA_MINI]) +async def test_battery_on_new_scale_added( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Ensure the battery sensor for a new scale is added automatically.""" + + mock_lamarzocco.config.scale = None + await async_init_integration(hass, mock_config_entry) + + state = hass.states.get("sensor.lmz_123a45_battery") + assert state is None + + mock_lamarzocco.config.scale = LaMarzoccoScale( + connected=True, name="Scale-123A45", address="aa:bb:cc:dd:ee:ff", battery=50 + ) + + freezer.tick(timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("sensor.scale_123a45_battery") + assert state From b5c46083735aaa36681a0206d825d3b71e8b7608 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Fri, 20 Dec 2024 12:25:45 +0100 Subject: [PATCH 516/677] Upgrade QS from bronze to silver for slide_local (#133560) --- .../components/slide_local/manifest.json | 2 +- .../components/slide_local/quality_scale.yaml | 19 ++++++++++++++----- 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/slide_local/manifest.json b/homeassistant/components/slide_local/manifest.json index 42c74b2c308..69d5c93b0af 100644 --- a/homeassistant/components/slide_local/manifest.json +++ b/homeassistant/components/slide_local/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/slide_local", "integration_type": "device", "iot_class": "local_polling", - "quality_scale": "bronze", + "quality_scale": "silver", "requirements": ["goslide-api==0.7.0"], "zeroconf": [ { diff --git a/homeassistant/components/slide_local/quality_scale.yaml b/homeassistant/components/slide_local/quality_scale.yaml index c3ce12efd80..7a2be591927 100644 --- a/homeassistant/components/slide_local/quality_scale.yaml +++ 
b/homeassistant/components/slide_local/quality_scale.yaml @@ -26,7 +26,10 @@ rules: log-when-unavailable: done entity-unavailable: done action-exceptions: done - reauthentication-flow: todo + reauthentication-flow: + status: exempt + comment: | + The password used is the device code and can't change. No reauth required. parallel-updates: done test-coverage: done integration-owner: done @@ -34,18 +37,24 @@ rules: docs-configuration-parameters: done # Gold - entity-translations: todo + entity-translations: done entity-device-class: done devices: done entity-category: done entity-disabled-by-default: done discovery: done - stale-devices: todo + stale-devices: + status: done + comment: | + Slide_local represents a single physical device, no removal stale devices required (besides removal of instance itself). diagnostics: done exception-translations: done - icon-translations: todo + icon-translations: done reconfiguration-flow: todo - dynamic-devices: todo + dynamic-devices: + status: exempt + comment: | + Slide_local represents a single physical device, no dynamic changes of devices possible (besides removal of instance itself). 
discovery-update-info: todo repair-issues: todo docs-use-cases: done From 86e43b7196dceabe546fec7dbb928afe271242ec Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 20 Dec 2024 12:29:04 +0100 Subject: [PATCH 517/677] Record Knocki quality scale (#133582) * Record Knocki quality scale * Record Knocki quality scale * Fix --- .../components/knocki/quality_scale.yaml | 92 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 92 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/knocki/quality_scale.yaml diff --git a/homeassistant/components/knocki/quality_scale.yaml b/homeassistant/components/knocki/quality_scale.yaml new file mode 100644 index 00000000000..45b3764d786 --- /dev/null +++ b/homeassistant/components/knocki/quality_scale.yaml @@ -0,0 +1,92 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: + status: exempt + comment: | + This integration is push-based. + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: + status: todo + comment: data_descriptions are missing + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + This integration does not provide actions. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + This integration does not have any configuration parameters. 
+ docs-installation-parameters: todo + entity-unavailable: todo + integration-owner: done + log-when-unavailable: todo + parallel-updates: todo + reauthentication-flow: todo + test-coverage: done + # Gold + devices: done + diagnostics: todo + discovery-update-info: + status: exempt + comment: This is a cloud service and does not benefit from device updates. + discovery: todo + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: done + entity-category: + status: exempt + comment: | + The default ones are good. + entity-device-class: + status: exempt + comment: | + Knocki does not have a device class. + entity-disabled-by-default: + status: exempt + comment: | + This integration does not have any entities that are disabled by default. + entity-translations: + status: exempt + comment: | + This integration does not have any translatable entities. + exception-translations: todo + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. 
+ stale-devices: todo + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 4e5cee2d16d..6109924b4e9 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -563,7 +563,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "kitchen_sink", "kiwi", "kmtronic", - "knocki", "knx", "kodi", "konnected", From df383a3a31f415dacb3fdfa6e2296db9fc09e5f6 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 20 Dec 2024 12:29:54 +0100 Subject: [PATCH 518/677] Record Mealie quality scale (#133587) * Record Mealie quality scale * Record NYT Games quality scale * Fix * Fix * Fix --- .../components/mealie/quality_scale.yaml | 79 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 79 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/mealie/quality_scale.yaml diff --git a/homeassistant/components/mealie/quality_scale.yaml b/homeassistant/components/mealie/quality_scale.yaml new file mode 100644 index 00000000000..c3b748525ca --- /dev/null +++ b/homeassistant/components/mealie/quality_scale.yaml @@ -0,0 +1,79 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. 
+ entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + This integration does not have any configuration parameters. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: done + test-coverage: + status: todo + comment: Platform missing tests + # Gold + devices: done + diagnostics: done + discovery-update-info: todo + discovery: todo + docs-data-update: done + docs-examples: done + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: done + comment: | + The integration adds new todo lists on runtime. + entity-category: done + entity-device-class: done + entity-disabled-by-default: + status: exempt + comment: | + This integration does not have any irrelevant entities. + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: done + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + stale-devices: + status: done + comment: | + The integration removes removed todo lists on runtime. 
+ + # Platinum + async-dependency: done + inject-websession: done + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 6109924b4e9..ec8898c5df5 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -631,7 +631,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "matter", "maxcube", "mazda", - "mealie", "meater", "medcom_ble", "media_extractor", From 6a599dc27a7a2f90480506f6cbed8f19390ec874 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 20 Dec 2024 12:30:35 +0100 Subject: [PATCH 519/677] Record NYT Games quality scale (#133592) * Record NYT Games quality scale * Record NYT Games quality scale * Fix --- .../components/nyt_games/quality_scale.yaml | 92 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 92 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/nyt_games/quality_scale.yaml diff --git a/homeassistant/components/nyt_games/quality_scale.yaml b/homeassistant/components/nyt_games/quality_scale.yaml new file mode 100644 index 00000000000..9f455bd4e2c --- /dev/null +++ b/homeassistant/components/nyt_games/quality_scale.yaml @@ -0,0 +1,92 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: todo + docs-installation-instructions: todo + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. 
+ entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + This integration does not provide additional actions. + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: + status: exempt + comment: | + This is handled by the coordinator. + integration-owner: done + log-when-unavailable: + status: done + comment: | + This is handled by the coordinator. + parallel-updates: todo + reauthentication-flow: todo + test-coverage: done + # Gold + devices: done + diagnostics: todo + discovery-update-info: + status: exempt + comment: | + This integration is a service and not discoverable. + discovery: + status: exempt + comment: | + This integration is a service and not discoverable. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: + status: done + comment: | + The entities are categorized well by using default category. + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + stale-devices: + status: exempt + comment: | + Games can't be "unplayed". 
+ + # Platinum + async-dependency: done + inject-websession: done + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index ec8898c5df5..8e3d0311719 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -733,7 +733,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "nut", "nws", "nx584", - "nyt_games", "nzbget", "oasa_telematics", "obihai", From e62a563ec143ff00df16f1b2c88a5f5e4c804e08 Mon Sep 17 00:00:00 2001 From: Cyrill Raccaud Date: Fri, 20 Dec 2024 12:30:55 +0100 Subject: [PATCH 520/677] Add Swiss Public Transport quality scale record (#131629) --- .../swiss_public_transport/quality_scale.yaml | 86 +++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 2 files changed, 86 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/swiss_public_transport/quality_scale.yaml diff --git a/homeassistant/components/swiss_public_transport/quality_scale.yaml b/homeassistant/components/swiss_public_transport/quality_scale.yaml new file mode 100644 index 00000000000..0329f9c8fab --- /dev/null +++ b/homeassistant/components/swiss_public_transport/quality_scale.yaml @@ -0,0 +1,86 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: + status: done + comment: > + Polling interval is set to support one connection. + There is a rate limit at 10000 calls per day. 
+ brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: todo + dependency-transparency: todo + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: No events implemented + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + config-entry-unloading: done + log-when-unavailable: + status: done + comment: Offloaded to coordinator + entity-unavailable: + status: done + comment: Offloaded to coordinator + action-exceptions: done + reauthentication-flow: + status: exempt + comment: No authentication needed + parallel-updates: todo + test-coverage: todo + integration-owner: done + docs-installation-parameters: todo + docs-configuration-parameters: + status: exempt + comment: no options flow + + # Gold + entity-translations: done + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: + status: done + comment: No disabled entities implemented + discovery: + status: exempt + comment: Nothing to discover + stale-devices: + status: exempt + comment: Stale not possible + diagnostics: todo + exception-translations: done + icon-translations: done + reconfiguration-flow: todo + dynamic-devices: + status: exempt + comment: No dynamic devices + discovery-update-info: + status: exempt + comment: Nothing to discover + repair-issues: + status: exempt + comment: Nothing to repair + docs-use-cases: todo + docs-supported-devices: done + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: todo + docs-troubleshooting: todo + docs-examples: todo + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: todo diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 8e3d0311719..e2eedea9a2c 100644 --- 
a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -983,7 +983,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "supla", "surepetcare", "swiss_hydrological_data", - "swiss_public_transport", "swisscom", "switch_as_x", "switchbee", From 5834ecb13ee1c293b535dd21f76f68af0257a47c Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 20 Dec 2024 12:36:54 +0100 Subject: [PATCH 521/677] Fix homeassistant_included flag for local backups (#133640) --- homeassistant/components/backup/util.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/backup/util.py b/homeassistant/components/backup/util.py index bb01a9a4e3f..c4d69789aac 100644 --- a/homeassistant/components/backup/util.py +++ b/homeassistant/components/backup/util.py @@ -50,6 +50,7 @@ def read_backup(backup_path: Path) -> AgentBackup: if ( homeassistant := cast(JsonObjectType, data.get("homeassistant")) ) and "version" in homeassistant: + homeassistant_included = True homeassistant_version = cast(str, homeassistant["version"]) database_included = not cast( bool, homeassistant.get("exclude_database", False) From 4c96b832975dc03763a6cc52457fd748e4b57a39 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 20 Dec 2024 13:13:25 +0100 Subject: [PATCH 522/677] Fix reading extra metadata for local backups (#133643) --- homeassistant/components/backup/util.py | 2 +- tests/components/backup/test_util.py | 88 +++++++++++++++++++++++++ 2 files changed, 89 insertions(+), 1 deletion(-) create mode 100644 tests/components/backup/test_util.py diff --git a/homeassistant/components/backup/util.py b/homeassistant/components/backup/util.py index c4d69789aac..0cedc07443a 100644 --- a/homeassistant/components/backup/util.py +++ b/homeassistant/components/backup/util.py @@ -61,7 +61,7 @@ def read_backup(backup_path: Path) -> AgentBackup: backup_id=cast(str, data["slug"]), database_included=database_included, date=cast(str, data["date"]), - extra_metadata=cast(dict[str, bool | 
str], data.get("metadata", {})), + extra_metadata=cast(dict[str, bool | str], data.get("extra", {})), folders=folders, homeassistant_included=homeassistant_included, homeassistant_version=homeassistant_version, diff --git a/tests/components/backup/test_util.py b/tests/components/backup/test_util.py new file mode 100644 index 00000000000..888029f2e35 --- /dev/null +++ b/tests/components/backup/test_util.py @@ -0,0 +1,88 @@ +"""Tests for the Backup integration's utility functions.""" + +from __future__ import annotations + +from unittest.mock import Mock, patch + +import pytest + +from homeassistant.components.backup import AddonInfo, AgentBackup, Folder +from homeassistant.components.backup.util import read_backup + + +@pytest.mark.parametrize( + ("backup_json_content", "expected_backup"), + [ + ( + b'{"compressed":true,"date":"2024-12-02T07:23:58.261875-05:00","homeassistant":' + b'{"exclude_database":true,"version":"2024.12.0.dev0"},"name":"test",' + b'"protected":true,"slug":"455645fe","type":"partial","version":2}', + AgentBackup( + addons=[], + backup_id="455645fe", + date="2024-12-02T07:23:58.261875-05:00", + database_included=False, + extra_metadata={}, + folders=[], + homeassistant_included=True, + homeassistant_version="2024.12.0.dev0", + name="test", + protected=True, + size=1234, + ), + ), + ( + b'{"slug":"d4b8fdc6","version":2,"name":"Core 2025.1.0.dev0",' + b'"date":"2024-12-20T11:27:51.119062+00:00","type":"partial",' + b'"supervisor_version":"2024.12.1.dev1803",' + b'"extra":{"instance_id":"6b453733d2d74d2a9ae432ff2fbaaa64",' + b'"with_automatic_settings":false},"homeassistant":' + b'{"version":"2025.1.0.dev202412200230","exclude_database":false,"size":0.0},' + b'"compressed":true,"protected":true,"repositories":[' + b'"https://github.com/home-assistant/hassio-addons-development","local",' + b'"https://github.com/esphome/home-assistant-addon","core",' + b'"https://github.com/music-assistant/home-assistant-addon",' + 
b'"https://github.com/hassio-addons/repository"],"crypto":"aes128",' + b'"folders":["share","media"],"addons":[{"slug":"core_configurator",' + b'"name":"File editor","version":"5.5.0","size":0.0},' + b'{"slug":"ae6e943c_remote_api","name":"Remote API proxy",' + b'"version":"1.3.0","size":0.0}],"docker":{"registries":{}}}', + AgentBackup( + addons=[ + AddonInfo( + name="File editor", + slug="core_configurator", + version="5.5.0", + ), + AddonInfo( + name="Remote API proxy", + slug="ae6e943c_remote_api", + version="1.3.0", + ), + ], + backup_id="d4b8fdc6", + date="2024-12-20T11:27:51.119062+00:00", + database_included=True, + extra_metadata={ + "instance_id": "6b453733d2d74d2a9ae432ff2fbaaa64", + "with_automatic_settings": False, + }, + folders=[Folder.SHARE, Folder.MEDIA], + homeassistant_included=True, + homeassistant_version="2025.1.0.dev202412200230", + name="Core 2025.1.0.dev0", + protected=True, + size=1234, + ), + ), + ], +) +def test_read_backup(backup_json_content: bytes, expected_backup: AgentBackup) -> None: + """Test reading a backup.""" + mock_path = Mock() + mock_path.stat.return_value.st_size = 1234 + + with patch("homeassistant.components.backup.util.tarfile.open") as mock_open_tar: + mock_open_tar.return_value.__enter__.return_value.extractfile().read.return_value = backup_json_content + backup = read_backup(mock_path) + assert backup == expected_backup From 870dc4dbeabe55a86d8d73d8e9184f55cdf3bfac Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 20 Dec 2024 13:14:41 +0100 Subject: [PATCH 523/677] Record Analytics Insights quality scale (#133571) * Record Analytics Insights quality scale * Record Analytics Insights quality scale * Record Analytics Insights quality scale * Update homeassistant/components/analytics_insights/quality_scale.yaml Co-authored-by: Josef Zweck * Update homeassistant/components/analytics_insights/quality_scale.yaml Co-authored-by: Josef Zweck --------- Co-authored-by: Josef Zweck --- 
.../analytics_insights/config_flow.py | 10 +- .../analytics_insights/quality_scale.yaml | 100 ++++++++++++++++++ script/hassfest/quality_scale.py | 1 - 3 files changed, 103 insertions(+), 8 deletions(-) create mode 100644 homeassistant/components/analytics_insights/quality_scale.yaml diff --git a/homeassistant/components/analytics_insights/config_flow.py b/homeassistant/components/analytics_insights/config_flow.py index c36755f5403..da77a35f789 100644 --- a/homeassistant/components/analytics_insights/config_flow.py +++ b/homeassistant/components/analytics_insights/config_flow.py @@ -11,12 +11,7 @@ from python_homeassistant_analytics import ( from python_homeassistant_analytics.models import IntegrationType import voluptuous as vol -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.selector import ( @@ -25,6 +20,7 @@ from homeassistant.helpers.selector import ( SelectSelectorConfig, ) +from . 
import AnalyticsInsightsConfigEntry from .const import ( CONF_TRACKED_ADDONS, CONF_TRACKED_CUSTOM_INTEGRATIONS, @@ -46,7 +42,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: AnalyticsInsightsConfigEntry, ) -> HomeassistantAnalyticsOptionsFlowHandler: """Get the options flow for this handler.""" return HomeassistantAnalyticsOptionsFlowHandler() diff --git a/homeassistant/components/analytics_insights/quality_scale.yaml b/homeassistant/components/analytics_insights/quality_scale.yaml new file mode 100644 index 00000000000..ff999d97d03 --- /dev/null +++ b/homeassistant/components/analytics_insights/quality_scale.yaml @@ -0,0 +1,100 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: todo + docs-installation-instructions: todo + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + This integration does not provide actions. + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: + status: done + comment: | + The coordinator handles this. + integration-owner: done + log-when-unavailable: + status: done + comment: | + The coordinator handles this. 
+ parallel-updates: todo + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. + test-coverage: todo + # Gold + devices: done + diagnostics: todo + discovery-update-info: + status: exempt + comment: | + This integration is a cloud service and thus does not support discovery. + discovery: + status: exempt + comment: | + This integration is a cloud service and thus does not support discovery. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration has a fixed single service. + entity-category: done + entity-device-class: + status: exempt + comment: | + This integration does not have entities with device classes. + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: + status: exempt + comment: All the options of this integration are managed via the options flow + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + stale-devices: + status: exempt + comment: | + This integration has a fixed single service. 
+ + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index e2eedea9a2c..3d880d7b536 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -151,7 +151,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "amcrest", "ampio", "analytics", - "analytics_insights", "android_ip_webcam", "androidtv", "androidtv_remote", From 7d04eef5c5de7c7867378f247005bb0d607cfe54 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 20 Dec 2024 15:06:55 +0100 Subject: [PATCH 524/677] Reject duplicates in WS command backup/config/update (#133650) * Reject duplicates in WS command backup/config/update * Add tests --- homeassistant/components/backup/websocket.py | 10 +- .../backup/snapshots/test_websocket.ambr | 180 ++++++++++++++++++ tests/components/backup/test_websocket.py | 12 ++ 3 files changed, 199 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/backup/websocket.py b/homeassistant/components/backup/websocket.py index 2fee84e39bb..718ffc3ae44 100644 --- a/homeassistant/components/backup/websocket.py +++ b/homeassistant/components/backup/websocket.py @@ -291,11 +291,15 @@ async def handle_config_info( vol.Required("type"): "backup/config/update", vol.Optional("create_backup"): vol.Schema( { - vol.Optional("agent_ids"): vol.All(list[str]), - vol.Optional("include_addons"): vol.Any(list[str], None), + vol.Optional("agent_ids"): vol.All([str], vol.Unique()), + vol.Optional("include_addons"): vol.Any( + vol.All([str], vol.Unique()), None + ), vol.Optional("include_all_addons"): bool, vol.Optional("include_database"): bool, - vol.Optional("include_folders"): vol.Any([vol.Coerce(Folder)], None), + vol.Optional("include_folders"): vol.Any( + vol.All([vol.Coerce(Folder)], vol.Unique()), None + ), vol.Optional("name"): vol.Any(str, None), vol.Optional("password"): vol.Any(str, None), }, diff --git 
a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index 1607e2e15d9..dbad733d83a 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -1466,6 +1466,186 @@ 'type': 'result', }) # --- +# name: test_config_update_errors[command1] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update_errors[command1].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update_errors[command2] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 
'type': 'result', + }) +# --- +# name: test_config_update_errors[command2].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update_errors[command3] + dict({ + 'id': 1, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- +# name: test_config_update_errors[command3].1 + dict({ + 'id': 3, + 'result': dict({ + 'config': dict({ + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'state': 'never', + }), + }), + }), + 'success': True, + 'type': 'result', + }) +# --- # name: test_delete[remote_agents0-backups0] dict({ 'id': 1, diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index a0860f49149..dfc118577b5 100644 --- a/tests/components/backup/test_websocket.py +++ 
b/tests/components/backup/test_websocket.py @@ -992,6 +992,18 @@ async def test_config_update( "create_backup": {"agent_ids": ["test-agent"]}, "schedule": "someday", }, + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test-agent", "test-agent"]}, + }, + { + "type": "backup/config/update", + "create_backup": {"include_addons": ["my-addon", "my-addon"]}, + }, + { + "type": "backup/config/update", + "create_backup": {"include_folders": ["media", "media"]}, + }, ], ) async def test_config_update_errors( From 0d309aa632e9e7a6de0db603fe8d7f2484e764c9 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Fri, 20 Dec 2024 15:14:04 +0100 Subject: [PATCH 525/677] Fix inconsistent spelling of "PIN" vs. "pin" (#133656) Part of the strings in the strings.json use an inconsistent spelling of "PIN" as "pin" This commit fixes this to ensure correct and consistent translations, too. --- homeassistant/components/elmax/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/elmax/strings.json b/homeassistant/components/elmax/strings.json index daa502a7dac..2ba74f5fc8f 100644 --- a/homeassistant/components/elmax/strings.json +++ b/homeassistant/components/elmax/strings.json @@ -50,7 +50,7 @@ "data": { "password": "[%key:common::config_flow::data::password%]", "username": "[%key:common::config_flow::data::username%]", - "panel_pin": "Panel Pin" + "panel_pin": "Panel PIN" } } }, @@ -58,7 +58,7 @@ "no_panel_online": "No online Elmax control panel was found.", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "network_error": "A network error occurred", - "invalid_pin": "The provided pin is invalid", + "invalid_pin": "The provided PIN is invalid", "invalid_mode": "Invalid or unsupported mode", "reauth_panel_disappeared": "The given panel is no longer associated to this user. 
Please log in using an account associated to this panel.", "unknown": "[%key:common::config_flow::error::unknown%]" From 83fdc07df0e2acc53ee8c363a15b6b6638e99616 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Fri, 20 Dec 2024 15:15:16 +0100 Subject: [PATCH 526/677] Fix inconsistent spelling of "PIN" vs. "pin" (#133655) As "PIN" is an abbreviation it should be all-caps throughout the UI. This commit fixes two inconsistent occurrences in the invoke_pin action. --- homeassistant/components/openhome/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/openhome/strings.json b/homeassistant/components/openhome/strings.json index b13fb997b7f..a757a2cb31c 100644 --- a/homeassistant/components/openhome/strings.json +++ b/homeassistant/components/openhome/strings.json @@ -2,11 +2,11 @@ "services": { "invoke_pin": { "name": "Invoke PIN", - "description": "Invokes a pin on the specified device.", + "description": "Invokes a PIN on the specified device.", "fields": { "pin": { "name": "PIN", - "description": "Which pin to invoke." + "description": "Which PIN to invoke." 
} } } From cbb4c06195e141af6d4d10d44e82d3c1cc0c702b Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 20 Dec 2024 15:17:08 +0100 Subject: [PATCH 527/677] Add Mealie to strict typing (#133644) --- .strict-typing | 1 + homeassistant/components/mealie/quality_scale.yaml | 2 +- mypy.ini | 10 ++++++++++ 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/.strict-typing b/.strict-typing index a96597da4c6..07a96a3d692 100644 --- a/.strict-typing +++ b/.strict-typing @@ -311,6 +311,7 @@ homeassistant.components.manual.* homeassistant.components.mastodon.* homeassistant.components.matrix.* homeassistant.components.matter.* +homeassistant.components.mealie.* homeassistant.components.media_extractor.* homeassistant.components.media_player.* homeassistant.components.media_source.* diff --git a/homeassistant/components/mealie/quality_scale.yaml b/homeassistant/components/mealie/quality_scale.yaml index c3b748525ca..9153bf7aadf 100644 --- a/homeassistant/components/mealie/quality_scale.yaml +++ b/homeassistant/components/mealie/quality_scale.yaml @@ -76,4 +76,4 @@ rules: # Platinum async-dependency: done inject-websession: done - strict-typing: todo + strict-typing: done diff --git a/mypy.ini b/mypy.ini index ca7195ef92f..f0d024b6b68 100644 --- a/mypy.ini +++ b/mypy.ini @@ -2866,6 +2866,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.mealie.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.media_extractor.*] check_untyped_defs = true disallow_incomplete_defs = true From 9f43a7a17b883603310a33e38b9002b66f18c029 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Fri, 20 Dec 2024 15:17:47 +0100 Subject: [PATCH 528/677] Fix inconsistent spelling of "PIN" and "ID" (#133653) * 
Fix inconsistent spelling of "PIN" and "ID" Several actions contain an inconsistent spelling of "PIN" and "ID" with lowercase characters. Especially to avoid (automated) mistranslations as (connection) pin etc. this needs to be corrected. * Fix lowercase "blink" as well Additional commit to fix the wrong spelling of "blink" along the way. --- homeassistant/components/blink/strings.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/blink/strings.json b/homeassistant/components/blink/strings.json index 6e2384e5d5b..74f8ae1cb28 100644 --- a/homeassistant/components/blink/strings.json +++ b/homeassistant/components/blink/strings.json @@ -84,16 +84,16 @@ } }, "send_pin": { - "name": "Send pin", - "description": "Sends a new PIN to blink for 2FA.", + "name": "Send PIN", + "description": "Sends a new PIN to Blink for 2FA.", "fields": { "pin": { - "name": "Pin", - "description": "PIN received from blink. Leave empty if you only received a verification email." + "name": "PIN", + "description": "PIN received from Blink. Leave empty if you only received a verification email." }, "config_entry_id": { "name": "Integration ID", - "description": "The Blink Integration id." + "description": "The Blink Integration ID." 
} } } From 10478f4ca58d57e2c6b3ad0ee1d157cf9f670dd2 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 20 Dec 2024 15:19:57 +0100 Subject: [PATCH 529/677] Fix logic in backup retention filter (#133654) --- homeassistant/components/backup/config.py | 2 +- tests/components/backup/test_websocket.py | 29 +++++++++++++++++++++++ 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/backup/config.py b/homeassistant/components/backup/config.py index e8d740d2e13..cdecf55848f 100644 --- a/homeassistant/components/backup/config.py +++ b/homeassistant/components/backup/config.py @@ -467,7 +467,7 @@ async def delete_backups_exceeding_configured_count(manager: BackupManager) -> N sorted( backups.items(), key=lambda backup_item: backup_item[1].date, - )[: len(backups) - manager.config.data.retention.copies] + )[: max(len(backups) - manager.config.data.retention.copies, 0)] ) await _delete_filtered_backups(manager, _backups_filter) diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index dfc118577b5..c75fb978082 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -1331,6 +1331,35 @@ async def test_config_schedule_logic( 0, [], ), + ( + { + "type": "backup/config/update", + "create_backup": {"agent_ids": ["test.test-agent"]}, + "retention": {"copies": 3, "days": None}, + "schedule": "daily", + }, + { + "backup-1": MagicMock( + date="2024-11-10T04:45:00+01:00", + with_automatic_settings=True, + spec=ManagerBackup, + ), + "backup-2": MagicMock( + date="2024-11-11T04:45:00+01:00", + with_automatic_settings=True, + spec=ManagerBackup, + ), + }, + {}, + {}, + "2024-11-11T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + "2024-11-12T04:45:00+01:00", + 1, + 1, + 0, + [], + ), ( { "type": "backup/config/update", From f49111a4d92bd3c0ecf0b70a868af0254300744d Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 20 Dec 2024 15:23:21 +0100 Subject: 
[PATCH 530/677] Bump aiohasupervisor to version 0.2.2b4 (#133652) * Bump aiohasupervisor to version 0.2.2b4 * Update test --- homeassistant/components/hassio/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/hassio/test_backup.py | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/hassio/manifest.json b/homeassistant/components/hassio/manifest.json index d2cf790219c..7276b76afc0 100644 --- a/homeassistant/components/hassio/manifest.json +++ b/homeassistant/components/hassio/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/hassio", "iot_class": "local_polling", "quality_scale": "internal", - "requirements": ["aiohasupervisor==0.2.2b3"], + "requirements": ["aiohasupervisor==0.2.2b4"], "single_config_entry": true } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index dae92035b11..bd78ef8e3fb 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -3,7 +3,7 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 -aiohasupervisor==0.2.2b3 +aiohasupervisor==0.2.2b4 aiohttp-fast-zlib==0.2.0 aiohttp==3.11.11 aiohttp_cors==0.7.0 diff --git a/pyproject.toml b/pyproject.toml index 171ca69dac0..71eae73a859 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ dependencies = [ # Integrations may depend on hassio integration without listing it to # change behavior based on presence of supervisor. 
Deprecated with #127228 # Lib can be removed with 2025.11 - "aiohasupervisor==0.2.2b3", + "aiohasupervisor==0.2.2b4", "aiohttp==3.11.11", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.2.0", diff --git a/requirements.txt b/requirements.txt index 9f1615b37f2..78aa370c4ec 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # Home Assistant Core aiodns==3.2.0 -aiohasupervisor==0.2.2b3 +aiohasupervisor==0.2.2b4 aiohttp==3.11.11 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index e6a28cd6f41..06cf4145b48 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -261,7 +261,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.2b3 +aiohasupervisor==0.2.2b4 # homeassistant.components.homekit_controller aiohomekit==3.2.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 788f0faff5f..3b9edd9e7c1 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -246,7 +246,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.2b3 +aiohasupervisor==0.2.2b4 # homeassistant.components.homekit_controller aiohomekit==3.2.7 diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index 9338313c87d..c39574fd941 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -650,7 +650,7 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( ), ( {"include_all_addons": True}, - replace(DEFAULT_BACKUP_OPTIONS, addons="all"), + replace(DEFAULT_BACKUP_OPTIONS, addons="ALL"), ), ( {"include_database": False}, From 1c0135880dbc1c38cfc1ed0b5a8e92e1f666d348 Mon Sep 17 00:00:00 2001 From: shapournemati-iotty <130070037+shapournemati-iotty@users.noreply.github.com> Date: Fri, 20 Dec 2024 15:33:05 +0100 Subject: [PATCH 531/677] Add outlet device class to iotty switch entity (#132912) * upgrade iottycloud 
lib to 0.3.0 * Add outlet * test outlet turn on and turn off * test add outlet * Refactor code to use only one SwitchEntity with an EntityDescription to distinguish Outlet and Lightswitch * Refactor switch entities to reduce duplicated code * Refactor tests to reduce duplicated code * Refactor code to improve abstraction layer using specific types instead of generics * Remove print and redundant field --- homeassistant/components/iotty/switch.py | 126 +++++++++---- tests/components/iotty/conftest.py | 28 +++ .../iotty/snapshots/test_switch.ambr | 13 ++ tests/components/iotty/test_switch.py | 178 ++++++++++++++---- 4 files changed, 268 insertions(+), 77 deletions(-) diff --git a/homeassistant/components/iotty/switch.py b/homeassistant/components/iotty/switch.py index 1e2bdffcf79..b06e3ea308d 100644 --- a/homeassistant/components/iotty/switch.py +++ b/homeassistant/components/iotty/switch.py @@ -3,13 +3,22 @@ from __future__ import annotations import logging -from typing import Any +from typing import TYPE_CHECKING, Any -from iottycloud.device import Device from iottycloud.lightswitch import LightSwitch -from iottycloud.verbs import LS_DEVICE_TYPE_UID +from iottycloud.outlet import Outlet +from iottycloud.verbs import ( + COMMAND_TURNOFF, + COMMAND_TURNON, + LS_DEVICE_TYPE_UID, + OU_DEVICE_TYPE_UID, +) -from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity +from homeassistant.components.switch import ( + SwitchDeviceClass, + SwitchEntity, + SwitchEntityDescription, +) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -20,31 +29,62 @@ from .entity import IottyEntity _LOGGER = logging.getLogger(__name__) +ENTITIES: dict[str, SwitchEntityDescription] = { + LS_DEVICE_TYPE_UID: SwitchEntityDescription( + key="light", + name=None, + device_class=SwitchDeviceClass.SWITCH, + ), + OU_DEVICE_TYPE_UID: SwitchEntityDescription( + key="outlet", + name=None, + 
device_class=SwitchDeviceClass.OUTLET, + ), +} + async def async_setup_entry( hass: HomeAssistant, config_entry: IottyConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: - """Activate the iotty LightSwitch component.""" + """Activate the iotty Switch component.""" _LOGGER.debug("Setup SWITCH entry id is %s", config_entry.entry_id) coordinator = config_entry.runtime_data.coordinator - entities = [ - IottyLightSwitch( - coordinator=coordinator, iotty_cloud=coordinator.iotty, iotty_device=d + lightswitch_entities = [ + IottySwitch( + coordinator=coordinator, + iotty_cloud=coordinator.iotty, + iotty_device=d, + entity_description=ENTITIES[LS_DEVICE_TYPE_UID], ) for d in coordinator.data.devices if d.device_type == LS_DEVICE_TYPE_UID if (isinstance(d, LightSwitch)) ] - _LOGGER.debug("Found %d LightSwitches", len(entities)) + _LOGGER.debug("Found %d LightSwitches", len(lightswitch_entities)) + + outlet_entities = [ + IottySwitch( + coordinator=coordinator, + iotty_cloud=coordinator.iotty, + iotty_device=d, + entity_description=ENTITIES[OU_DEVICE_TYPE_UID], + ) + for d in coordinator.data.devices + if d.device_type == OU_DEVICE_TYPE_UID + if (isinstance(d, Outlet)) + ] + _LOGGER.debug("Found %d Outlets", len(outlet_entities)) + + entities = lightswitch_entities + outlet_entities async_add_entities(entities) known_devices: set = config_entry.runtime_data.known_devices for known_device in coordinator.data.devices: - if known_device.device_type == LS_DEVICE_TYPE_UID: + if known_device.device_type in {LS_DEVICE_TYPE_UID, OU_DEVICE_TYPE_UID}: known_devices.add(known_device) @callback @@ -59,21 +99,37 @@ async def async_setup_entry( # Add entities for devices which we've not yet seen for device in devices: - if ( - any(d.device_id == device.device_id for d in known_devices) - or device.device_type != LS_DEVICE_TYPE_UID + if any(d.device_id == device.device_id for d in known_devices) or ( + device.device_type not in {LS_DEVICE_TYPE_UID, OU_DEVICE_TYPE_UID} ): 
continue - iotty_entity = IottyLightSwitch( - coordinator=coordinator, - iotty_cloud=coordinator.iotty, - iotty_device=LightSwitch( + iotty_entity: SwitchEntity + iotty_device: LightSwitch | Outlet + if device.device_type == LS_DEVICE_TYPE_UID: + if TYPE_CHECKING: + assert isinstance(device, LightSwitch) + iotty_device = LightSwitch( device.device_id, device.serial_number, device.device_type, device.device_name, - ), + ) + else: + if TYPE_CHECKING: + assert isinstance(device, Outlet) + iotty_device = Outlet( + device.device_id, + device.serial_number, + device.device_type, + device.device_name, + ) + + iotty_entity = IottySwitch( + coordinator=coordinator, + iotty_cloud=coordinator.iotty, + iotty_device=iotty_device, + entity_description=ENTITIES[device.device_type], ) entities.extend([iotty_entity]) @@ -85,24 +141,27 @@ async def async_setup_entry( coordinator.async_add_listener(async_update_data) -class IottyLightSwitch(IottyEntity, SwitchEntity): - """Haas entity class for iotty LightSwitch.""" +class IottySwitch(IottyEntity, SwitchEntity): + """Haas entity class for iotty switch.""" - _attr_device_class = SwitchDeviceClass.SWITCH - _iotty_device: LightSwitch + _attr_device_class: SwitchDeviceClass | None + _iotty_device: LightSwitch | Outlet def __init__( self, coordinator: IottyDataUpdateCoordinator, iotty_cloud: IottyProxy, - iotty_device: LightSwitch, + iotty_device: LightSwitch | Outlet, + entity_description: SwitchEntityDescription, ) -> None: - """Initialize the LightSwitch device.""" + """Initialize the Switch device.""" super().__init__(coordinator, iotty_cloud, iotty_device) + self.entity_description = entity_description + self._attr_device_class = entity_description.device_class @property def is_on(self) -> bool: - """Return true if the LightSwitch is on.""" + """Return true if the Switch is on.""" _LOGGER.debug( "Retrieve device status for %s ? 
%s", self._iotty_device.device_id, @@ -111,30 +170,25 @@ class IottyLightSwitch(IottyEntity, SwitchEntity): return self._iotty_device.is_on async def async_turn_on(self, **kwargs: Any) -> None: - """Turn the LightSwitch on.""" + """Turn the Switch on.""" _LOGGER.debug("[%s] Turning on", self._iotty_device.device_id) - await self._iotty_cloud.command( - self._iotty_device.device_id, self._iotty_device.cmd_turn_on() - ) + await self._iotty_cloud.command(self._iotty_device.device_id, COMMAND_TURNON) await self.coordinator.async_request_refresh() async def async_turn_off(self, **kwargs: Any) -> None: - """Turn the LightSwitch off.""" + """Turn the Switch off.""" _LOGGER.debug("[%s] Turning off", self._iotty_device.device_id) - await self._iotty_cloud.command( - self._iotty_device.device_id, self._iotty_device.cmd_turn_off() - ) + await self._iotty_cloud.command(self._iotty_device.device_id, COMMAND_TURNOFF) await self.coordinator.async_request_refresh() @callback def _handle_coordinator_update(self) -> None: """Handle updated data from the coordinator.""" - device: Device = next( + device: LightSwitch | Outlet = next( # type: ignore[assignment] device for device in self.coordinator.data.devices if device.device_id == self._iotty_device.device_id ) - if isinstance(device, LightSwitch): - self._iotty_device.is_on = device.is_on + self._iotty_device.is_on = device.is_on self.async_write_ha_state() diff --git a/tests/components/iotty/conftest.py b/tests/components/iotty/conftest.py index 1935a069cca..51a23bf18c7 100644 --- a/tests/components/iotty/conftest.py +++ b/tests/components/iotty/conftest.py @@ -6,10 +6,12 @@ from unittest.mock import AsyncMock, MagicMock, patch from aiohttp import ClientSession from iottycloud.device import Device from iottycloud.lightswitch import LightSwitch +from iottycloud.outlet import Outlet from iottycloud.shutter import Shutter from iottycloud.verbs import ( LS_DEVICE_TYPE_UID, OPEN_PERCENTAGE, + OU_DEVICE_TYPE_UID, RESULT, 
SH_DEVICE_TYPE_UID, STATUS, @@ -73,6 +75,22 @@ test_sh_one_added = [ sh_2, ] +ou_0 = Outlet("TestOU", "TEST_SERIAL_OU_0", OU_DEVICE_TYPE_UID, "[TEST] Outlet 0") + +ou_1 = Outlet("TestOU1", "TEST_SERIAL_OU_1", OU_DEVICE_TYPE_UID, "[TEST] Outlet 1") + +ou_2 = Outlet("TestOU2", "TEST_SERIAL_OU_2", OU_DEVICE_TYPE_UID, "[TEST] Outlet 2") + +test_ou = [ou_0, ou_1] + +test_ou_one_removed = [ou_0] + +test_ou_one_added = [ + ou_0, + ou_1, + ou_2, +] + @pytest.fixture async def local_oauth_impl(hass: HomeAssistant): @@ -175,6 +193,16 @@ def mock_get_devices_twolightswitches() -> Generator[AsyncMock]: yield mock_fn +@pytest.fixture +def mock_get_devices_two_outlets() -> Generator[AsyncMock]: + """Mock for get_devices, returning two outlets.""" + + with patch( + "iottycloud.cloudapi.CloudApi.get_devices", return_value=test_ou + ) as mock_fn: + yield mock_fn + + @pytest.fixture def mock_get_devices_twoshutters() -> Generator[AsyncMock]: """Mock for get_devices, returning two shutters.""" diff --git a/tests/components/iotty/snapshots/test_switch.ambr b/tests/components/iotty/snapshots/test_switch.ambr index 8ec22ed162a..c6e8764cf37 100644 --- a/tests/components/iotty/snapshots/test_switch.ambr +++ b/tests/components/iotty/snapshots/test_switch.ambr @@ -120,6 +120,19 @@ 'switch.test_light_switch_2_test_serial_2', ]) # --- +# name: test_outlet_insertion_ok + list([ + 'switch.test_outlet_0_test_serial_ou_0', + 'switch.test_outlet_1_test_serial_ou_1', + ]) +# --- +# name: test_outlet_insertion_ok.1 + list([ + 'switch.test_outlet_0_test_serial_ou_0', + 'switch.test_outlet_1_test_serial_ou_1', + 'switch.test_outlet_2_test_serial_ou_2', + ]) +# --- # name: test_setup_entry_ok_nodevices list([ ]) diff --git a/tests/components/iotty/test_switch.py b/tests/components/iotty/test_switch.py index 235a897c305..069fa665cac 100644 --- a/tests/components/iotty/test_switch.py +++ b/tests/components/iotty/test_switch.py @@ -20,12 +20,52 @@ from homeassistant.helpers import ( entity_registry as er, 
) -from .conftest import test_ls_one_added, test_ls_one_removed +from .conftest import test_ls_one_added, test_ls_one_removed, test_ou_one_added from tests.common import MockConfigEntry, async_fire_time_changed -async def test_turn_on_ok( +async def check_command_ok( + entity_id: str, + initial_status: str, + final_status: str, + command: str, + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_status, + mock_command_fn, +) -> None: + """Issue a command.""" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert (state := hass.states.get(entity_id)) + assert state.state == initial_status + + mock_get_status.return_value = {RESULT: {STATUS: final_status}} + + await hass.services.async_call( + SWITCH_DOMAIN, + command, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + await hass.async_block_till_done() + mock_command_fn.assert_called_once() + + assert (state := hass.states.get(entity_id)) + assert state.state == final_status + + +async def test_turn_on_light_ok( hass: HomeAssistant, mock_config_entry: MockConfigEntry, local_oauth_impl: ClientSession, @@ -37,34 +77,45 @@ async def test_turn_on_ok( entity_id = "switch.test_light_switch_0_test_serial_0" - mock_config_entry.add_to_hass(hass) - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl + await check_command_ok( + entity_id=entity_id, + initial_status=STATUS_OFF, + final_status=STATUS_ON, + command=SERVICE_TURN_ON, + hass=hass, + mock_config_entry=mock_config_entry, + local_oauth_impl=local_oauth_impl, + mock_get_status=mock_get_status_filled_off, + mock_command_fn=mock_command_fn, ) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - assert (state := hass.states.get(entity_id)) - assert state.state == STATUS_OFF +async def 
test_turn_on_outlet_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_two_outlets, + mock_get_status_filled_off, + mock_command_fn, +) -> None: + """Issue a turnon command.""" - mock_get_status_filled_off.return_value = {RESULT: {STATUS: STATUS_ON}} + entity_id = "switch.test_outlet_0_test_serial_ou_0" - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, + await check_command_ok( + entity_id=entity_id, + initial_status=STATUS_OFF, + final_status=STATUS_ON, + command=SERVICE_TURN_ON, + hass=hass, + mock_config_entry=mock_config_entry, + local_oauth_impl=local_oauth_impl, + mock_get_status=mock_get_status_filled_off, + mock_command_fn=mock_command_fn, ) - await hass.async_block_till_done() - mock_command_fn.assert_called_once() - assert (state := hass.states.get(entity_id)) - assert state.state == STATUS_ON - - -async def test_turn_off_ok( +async def test_turn_off_light_ok( hass: HomeAssistant, mock_config_entry: MockConfigEntry, local_oauth_impl: ClientSession, @@ -76,32 +127,43 @@ async def test_turn_off_ok( entity_id = "switch.test_light_switch_0_test_serial_0" - mock_config_entry.add_to_hass(hass) - - config_entry_oauth2_flow.async_register_implementation( - hass, DOMAIN, local_oauth_impl + await check_command_ok( + entity_id=entity_id, + initial_status=STATUS_ON, + final_status=STATUS_OFF, + command=SERVICE_TURN_OFF, + hass=hass, + mock_config_entry=mock_config_entry, + local_oauth_impl=local_oauth_impl, + mock_get_status=mock_get_status_filled, + mock_command_fn=mock_command_fn, ) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - assert (state := hass.states.get(entity_id)) - assert state.state == STATUS_ON +async def test_turn_off_outlet_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_two_outlets, + mock_get_status_filled, + 
mock_command_fn, +) -> None: + """Issue a turnoff command.""" - mock_get_status_filled.return_value = {RESULT: {STATUS: STATUS_OFF}} + entity_id = "switch.test_outlet_0_test_serial_ou_0" - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, + await check_command_ok( + entity_id=entity_id, + initial_status=STATUS_ON, + final_status=STATUS_OFF, + command=SERVICE_TURN_OFF, + hass=hass, + mock_config_entry=mock_config_entry, + local_oauth_impl=local_oauth_impl, + mock_get_status=mock_get_status_filled, + mock_command_fn=mock_command_fn, ) - await hass.async_block_till_done() - mock_command_fn.assert_called_once() - - assert (state := hass.states.get(entity_id)) - assert state.state == STATUS_OFF - async def test_setup_entry_ok_nodevices( hass: HomeAssistant, @@ -229,6 +291,40 @@ async def test_devices_insertion_ok( assert hass.states.async_entity_ids() == snapshot +async def test_outlet_insertion_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_two_outlets, + mock_get_status_filled, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: + """Test iotty switch insertion.""" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Should have two devices + assert hass.states.async_entity_ids_count() == 2 + assert hass.states.async_entity_ids() == snapshot + + mock_get_devices_two_outlets.return_value = test_ou_one_added + + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Should have three devices + assert hass.states.async_entity_ids_count() == 3 + assert hass.states.async_entity_ids() == snapshot + + async def test_api_not_ok_entities_stay_the_same_as_before( hass: HomeAssistant, mock_config_entry: 
MockConfigEntry, From 5afb9a5053038cfc02832b55f6e3456e89591037 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 20 Dec 2024 15:43:46 +0100 Subject: [PATCH 532/677] Validate password before restoring backup (#133647) * Validate password before restoring backup * Raise specific error when password is incorrect --- homeassistant/components/backup/manager.py | 12 +++- homeassistant/components/backup/util.py | 37 ++++++++++- homeassistant/components/backup/websocket.py | 26 ++++---- .../backup/snapshots/test_websocket.ambr | 11 ++++ tests/components/backup/test_manager.py | 63 ++++++++++++++++++- tests/components/backup/test_util.py | 48 +++++++++++++- tests/components/backup/test_websocket.py | 40 +++++++++++- 7 files changed, 220 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index 4a0b8553f1c..a27c1cc7170 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -48,7 +48,11 @@ from .const import ( ) from .models import AgentBackup, Folder from .store import BackupStore -from .util import make_backup_dir, read_backup +from .util import make_backup_dir, read_backup, validate_password + + +class IncorrectPasswordError(HomeAssistantError): + """Raised when the password is incorrect.""" @dataclass(frozen=True, kw_only=True, slots=True) @@ -1269,6 +1273,12 @@ class CoreBackupReaderWriter(BackupReaderWriter): remove_after_restore = True + password_valid = await self._hass.async_add_executor_job( + validate_password, path, password + ) + if not password_valid: + raise IncorrectPasswordError("The password provided is incorrect.") + def _write_restore_file() -> None: """Write the restore file.""" Path(self._hass.config.path(RESTORE_BACKUP_FILE)).write_text( diff --git a/homeassistant/components/backup/util.py b/homeassistant/components/backup/util.py index 0cedc07443a..930625c52ca 100644 --- a/homeassistant/components/backup/util.py 
+++ b/homeassistant/components/backup/util.py @@ -9,11 +9,13 @@ import tarfile from typing import cast import aiohttp +from securetar import SecureTarFile +from homeassistant.backup_restore import password_to_key from homeassistant.core import HomeAssistant from homeassistant.util.json import JsonObjectType, json_loads_object -from .const import BUF_SIZE +from .const import BUF_SIZE, LOGGER from .models import AddonInfo, AgentBackup, Folder @@ -71,6 +73,39 @@ def read_backup(backup_path: Path) -> AgentBackup: ) +def validate_password(path: Path, password: str | None) -> bool: + """Validate the password.""" + with tarfile.open(path, "r:", bufsize=BUF_SIZE) as backup_file: + compressed = False + ha_tar_name = "homeassistant.tar" + try: + ha_tar = backup_file.extractfile(ha_tar_name) + except KeyError: + compressed = True + ha_tar_name = "homeassistant.tar.gz" + try: + ha_tar = backup_file.extractfile(ha_tar_name) + except KeyError: + LOGGER.error("No homeassistant.tar or homeassistant.tar.gz found") + return False + try: + with SecureTarFile( + path, # Not used + gzip=compressed, + key=password_to_key(password) if password is not None else None, + mode="r", + fileobj=ha_tar, + ): + # If we can read the tar file, the password is correct + return True + except tarfile.ReadError: + LOGGER.debug("Invalid password") + return False + except Exception: # noqa: BLE001 + LOGGER.exception("Unexpected error validating password") + return False + + async def receive_file( hass: HomeAssistant, contents: aiohttp.BodyPartReader, path: Path ) -> None: diff --git a/homeassistant/components/backup/websocket.py b/homeassistant/components/backup/websocket.py index 718ffc3ae44..0139b7fdb77 100644 --- a/homeassistant/components/backup/websocket.py +++ b/homeassistant/components/backup/websocket.py @@ -9,7 +9,7 @@ from homeassistant.core import HomeAssistant, callback from .config import ScheduleState from .const import DATA_MANAGER, LOGGER -from .manager import ManagerStateEvent +from 
.manager import IncorrectPasswordError, ManagerStateEvent from .models import Folder @@ -131,16 +131,20 @@ async def handle_restore( msg: dict[str, Any], ) -> None: """Restore a backup.""" - await hass.data[DATA_MANAGER].async_restore_backup( - msg["backup_id"], - agent_id=msg["agent_id"], - password=msg.get("password"), - restore_addons=msg.get("restore_addons"), - restore_database=msg["restore_database"], - restore_folders=msg.get("restore_folders"), - restore_homeassistant=msg["restore_homeassistant"], - ) - connection.send_result(msg["id"]) + try: + await hass.data[DATA_MANAGER].async_restore_backup( + msg["backup_id"], + agent_id=msg["agent_id"], + password=msg.get("password"), + restore_addons=msg.get("restore_addons"), + restore_database=msg["restore_database"], + restore_folders=msg.get("restore_folders"), + restore_homeassistant=msg["restore_homeassistant"], + ) + except IncorrectPasswordError: + connection.send_error(msg["id"], "password_incorrect", "Incorrect password") + else: + connection.send_result(msg["id"]) @websocket_api.require_admin diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index dbad733d83a..4de06861b67 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -3050,6 +3050,17 @@ # name: test_restore_remote_agent[remote_agents1-backups1].1 1 # --- +# name: test_restore_wrong_password + dict({ + 'error': dict({ + 'code': 'password_incorrect', + 'message': 'Incorrect password', + }), + 'id': 1, + 'success': False, + 'type': 'result', + }) +# --- # name: test_subscribe_event dict({ 'event': dict({ diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index e976ad0c099..1c45c86149b 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -1120,6 +1120,9 @@ async def test_async_trigger_restore( 
patch("pathlib.Path.open"), patch("pathlib.Path.write_text") as mocked_write_text, patch("homeassistant.core.ServiceRegistry.async_call") as mocked_service_call, + patch( + "homeassistant.components.backup.manager.validate_password" + ) as validate_password_mock, patch.object(BackupAgentTest, "async_download_backup") as download_mock, ): download_mock.return_value.__aiter__.return_value = iter((b"backup data",)) @@ -1132,19 +1135,72 @@ async def test_async_trigger_restore( restore_folders=None, restore_homeassistant=restore_homeassistant, ) + backup_path = f"{hass.config.path()}/{dir}/abc123.tar" expected_restore_file = json.dumps( { - "path": f"{hass.config.path()}/{dir}/abc123.tar", + "path": backup_path, "password": password, "remove_after_restore": agent_id != LOCAL_AGENT_ID, "restore_database": restore_database, "restore_homeassistant": restore_homeassistant, } ) + validate_password_mock.assert_called_once_with(Path(backup_path), password) assert mocked_write_text.call_args[0][0] == expected_restore_file assert mocked_service_call.called +async def test_async_trigger_restore_wrong_password(hass: HomeAssistant) -> None: + """Test trigger restore.""" + password = "hunter2" + manager = BackupManager(hass, CoreBackupReaderWriter(hass)) + hass.data[DATA_MANAGER] = manager + + await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform) + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock( + return_value=[BackupAgentTest("remote", backups=[TEST_BACKUP_ABC123])] + ), + spec_set=BackupAgentPlatformProtocol, + ), + ) + await manager.load_platforms() + + local_agent = manager.backup_agents[LOCAL_AGENT_ID] + local_agent._backups = {TEST_BACKUP_ABC123.backup_id: TEST_BACKUP_ABC123} + local_agent._loaded_backups = True + + with ( + patch("pathlib.Path.exists", return_value=True), + patch("pathlib.Path.write_text") as mocked_write_text, + patch("homeassistant.core.ServiceRegistry.async_call") as 
mocked_service_call, + patch( + "homeassistant.components.backup.manager.validate_password" + ) as validate_password_mock, + ): + validate_password_mock.return_value = False + with pytest.raises( + HomeAssistantError, match="The password provided is incorrect." + ): + await manager.async_restore_backup( + TEST_BACKUP_ABC123.backup_id, + agent_id=LOCAL_AGENT_ID, + password=password, + restore_addons=None, + restore_database=True, + restore_folders=None, + restore_homeassistant=True, + ) + + backup_path = f"{hass.config.path()}/backups/abc123.tar" + validate_password_mock.assert_called_once_with(Path(backup_path), password) + mocked_write_text.assert_not_called() + mocked_service_call.assert_not_called() + + @pytest.mark.parametrize( ("parameters", "expected_error"), [ @@ -1191,6 +1247,11 @@ async def test_async_trigger_restore_wrong_parameters( with ( patch("pathlib.Path.exists", return_value=True), + patch("pathlib.Path.write_text") as mocked_write_text, + patch("homeassistant.core.ServiceRegistry.async_call") as mocked_service_call, pytest.raises(HomeAssistantError, match=expected_error), ): await manager.async_restore_backup(**(default_parameters | parameters)) + + mocked_write_text.assert_not_called() + mocked_service_call.assert_not_called() diff --git a/tests/components/backup/test_util.py b/tests/components/backup/test_util.py index 888029f2e35..60cfc77b1aa 100644 --- a/tests/components/backup/test_util.py +++ b/tests/components/backup/test_util.py @@ -2,12 +2,13 @@ from __future__ import annotations +import tarfile from unittest.mock import Mock, patch import pytest from homeassistant.components.backup import AddonInfo, AgentBackup, Folder -from homeassistant.components.backup.util import read_backup +from homeassistant.components.backup.util import read_backup, validate_password @pytest.mark.parametrize( @@ -83,6 +84,49 @@ def test_read_backup(backup_json_content: bytes, expected_backup: AgentBackup) - mock_path.stat.return_value.st_size = 1234 with 
patch("homeassistant.components.backup.util.tarfile.open") as mock_open_tar: - mock_open_tar.return_value.__enter__.return_value.extractfile().read.return_value = backup_json_content + mock_open_tar.return_value.__enter__.return_value.extractfile.return_value.read.return_value = backup_json_content backup = read_backup(mock_path) assert backup == expected_backup + + +@pytest.mark.parametrize("password", [None, "hunter2"]) +def test_validate_password(password: str | None) -> None: + """Test validating a password.""" + mock_path = Mock() + + with ( + patch("homeassistant.components.backup.util.tarfile.open"), + patch("homeassistant.components.backup.util.SecureTarFile"), + ): + assert validate_password(mock_path, password) is True + + +@pytest.mark.parametrize("password", [None, "hunter2"]) +@pytest.mark.parametrize("secure_tar_side_effect", [tarfile.ReadError, Exception]) +def test_validate_password_wrong_password( + password: str | None, secure_tar_side_effect: Exception +) -> None: + """Test validating a password.""" + mock_path = Mock() + + with ( + patch("homeassistant.components.backup.util.tarfile.open"), + patch( + "homeassistant.components.backup.util.SecureTarFile", + ) as mock_secure_tar, + ): + mock_secure_tar.return_value.__enter__.side_effect = secure_tar_side_effect + assert validate_password(mock_path, password) is False + + +def test_validate_password_no_homeassistant() -> None: + """Test validating a password.""" + mock_path = Mock() + + with ( + patch("homeassistant.components.backup.util.tarfile.open") as mock_open_tar, + ): + mock_open_tar.return_value.__enter__.return_value.extractfile.side_effect = ( + KeyError + ) + assert validate_password(mock_path, "hunter2") is False diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index c75fb978082..b407241be54 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -571,6 +571,7 @@ async def 
test_restore_local_agent( with ( patch("pathlib.Path.exists", return_value=True), patch("pathlib.Path.write_text"), + patch("homeassistant.components.backup.manager.validate_password"), ): await client.send_json_auto_id( { @@ -606,7 +607,11 @@ async def test_restore_remote_agent( client = await hass_ws_client(hass) await hass.async_block_till_done() - with patch("pathlib.Path.write_text"), patch("pathlib.Path.open"): + with ( + patch("pathlib.Path.write_text"), + patch("pathlib.Path.open"), + patch("homeassistant.components.backup.manager.validate_password"), + ): await client.send_json_auto_id( { "type": "backup/restore", @@ -618,6 +623,39 @@ async def test_restore_remote_agent( assert len(restart_calls) == snapshot +async def test_restore_wrong_password( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test calling the restore command.""" + await setup_backup_integration( + hass, with_hassio=False, backups={LOCAL_AGENT_ID: [TEST_BACKUP_ABC123]} + ) + restart_calls = async_mock_service(hass, "homeassistant", "restart") + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + with ( + patch("pathlib.Path.exists", return_value=True), + patch("pathlib.Path.write_text"), + patch( + "homeassistant.components.backup.manager.validate_password", + return_value=False, + ), + ): + await client.send_json_auto_id( + { + "type": "backup/restore", + "backup_id": "abc123", + "agent_id": "backup.local", + } + ) + assert await client.receive_json() == snapshot + assert len(restart_calls) == 0 + + @pytest.mark.parametrize( "access_token_fixture_name", ["hass_access_token", "hass_supervisor_access_token"], From 87f5a7057edb93098030e271b6097c20a775417b Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Fri, 20 Dec 2024 16:00:44 +0100 Subject: [PATCH 533/677] Fix target temperature for AtlanticElectricalTowelDryer in Overkiz (#133657) --- .../climate/atlantic_electrical_towel_dryer.py | 15 
+++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/overkiz/climate/atlantic_electrical_towel_dryer.py b/homeassistant/components/overkiz/climate/atlantic_electrical_towel_dryer.py index 92bd6ceae82..0b5ba3ffcc7 100644 --- a/homeassistant/components/overkiz/climate/atlantic_electrical_towel_dryer.py +++ b/homeassistant/components/overkiz/climate/atlantic_electrical_towel_dryer.py @@ -84,12 +84,15 @@ class AtlanticElectricalTowelDryer(OverkizEntity, ClimateEntity): ) @property - def target_temperature(self) -> None: - """Return the temperature.""" - if self.hvac_mode == HVACMode.AUTO: - self.executor.select_state(OverkizState.IO_EFFECTIVE_TEMPERATURE_SETPOINT) - else: - self.executor.select_state(OverkizState.CORE_TARGET_TEMPERATURE) + def target_temperature(self) -> float | None: + """Return the target temperature.""" + state = ( + OverkizState.IO_EFFECTIVE_TEMPERATURE_SETPOINT + if self.hvac_mode == HVACMode.AUTO + else OverkizState.CORE_TARGET_TEMPERATURE + ) + + return cast(float, self.executor.select_state(state)) @property def current_temperature(self) -> float | None: From ad7a334147213e01e45b5ea15804d75629e3f362 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 20 Dec 2024 16:08:31 +0100 Subject: [PATCH 534/677] Add translations to Mealie exceptions (#133648) --- homeassistant/components/mealie/__init__.py | 10 ++++++++-- .../components/mealie/coordinator.py | 20 ++++++++++++------- .../components/mealie/quality_scale.yaml | 2 +- homeassistant/components/mealie/strings.json | 15 ++++++++++++++ 4 files changed, 37 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/mealie/__init__.py b/homeassistant/components/mealie/__init__.py index 443c8fdd991..5e1523b939a 100644 --- a/homeassistant/components/mealie/__init__.py +++ b/homeassistant/components/mealie/__init__.py @@ -52,9 +52,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: MealieConfigEntry) -> bo about = await 
client.get_about() version = create_version(about.version) except MealieAuthenticationError as error: - raise ConfigEntryAuthFailed from error + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_failed", + ) from error except MealieError as error: - raise ConfigEntryNotReady(error) from error + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="setup_failed", + ) from error if not version.valid: LOGGER.warning( diff --git a/homeassistant/components/mealie/coordinator.py b/homeassistant/components/mealie/coordinator.py index 051586e53c2..7d4f23d706e 100644 --- a/homeassistant/components/mealie/coordinator.py +++ b/homeassistant/components/mealie/coordinator.py @@ -23,7 +23,7 @@ from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed import homeassistant.util.dt as dt_util -from .const import LOGGER +from .const import DOMAIN, LOGGER WEEK = timedelta(days=7) @@ -53,7 +53,7 @@ class MealieDataUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT]): super().__init__( hass, LOGGER, - name=self._name, + name=f"Mealie {self._name}", update_interval=self._update_interval, ) self.client = client @@ -63,9 +63,15 @@ class MealieDataUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT]): try: return await self._async_update_internal() except MealieAuthenticationError as error: - raise ConfigEntryAuthFailed from error + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_failed", + ) from error except MealieConnectionError as error: - raise UpdateFailed(error) from error + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key=f"update_failed_{self._name}", + ) from error @abstractmethod async def _async_update_internal(self) -> _DataT: @@ -77,7 +83,7 @@ class MealieMealplanCoordinator( ): """Class to manage fetching Mealie data.""" - _name = "MealieMealplan" + _name = "mealplan" 
_update_interval = timedelta(hours=1) async def _async_update_internal(self) -> dict[MealplanEntryType, list[Mealplan]]: @@ -106,7 +112,7 @@ class MealieShoppingListCoordinator( ): """Class to manage fetching Mealie Shopping list data.""" - _name = "MealieShoppingList" + _name = "shopping_list" _update_interval = timedelta(minutes=5) async def _async_update_internal( @@ -130,7 +136,7 @@ class MealieShoppingListCoordinator( class MealieStatisticsCoordinator(MealieDataUpdateCoordinator[Statistics]): """Class to manage fetching Mealie Statistics data.""" - _name = "MealieStatistics" + _name = "statistics" _update_interval = timedelta(minutes=15) async def _async_update_internal( diff --git a/homeassistant/components/mealie/quality_scale.yaml b/homeassistant/components/mealie/quality_scale.yaml index 9153bf7aadf..c72cde3672d 100644 --- a/homeassistant/components/mealie/quality_scale.yaml +++ b/homeassistant/components/mealie/quality_scale.yaml @@ -61,7 +61,7 @@ rules: comment: | This integration does not have any irrelevant entities. entity-translations: done - exception-translations: todo + exception-translations: done icon-translations: done reconfiguration-flow: done repair-issues: diff --git a/homeassistant/components/mealie/strings.json b/homeassistant/components/mealie/strings.json index de91c507950..e80db7ab3b0 100644 --- a/homeassistant/components/mealie/strings.json +++ b/homeassistant/components/mealie/strings.json @@ -126,6 +126,21 @@ }, "version_error": { "message": "You are running {mealie_version} of Mealie. Minimum required version is {min_version}. Please upgrade Mealie and then retry." + }, + "auth_failed": { + "message": "Authentication failed. Please reauthenticate." + }, + "update_failed_mealplan": { + "message": "Could not fetch mealplan data." + }, + "update_failed_shopping_list": { + "message": "Could not fetch shopping list data." + }, + "update_failed_statistics": { + "message": "Could not fetch statistics data." 
+ }, + "setup_failed": { + "message": "Could not connect to the Mealie instance." } }, "services": { From 92195ff77dafdd4e73dffcf3a5e700e2f83feaf3 Mon Sep 17 00:00:00 2001 From: Andre Lengwenus Date: Fri, 20 Dec 2024 17:10:37 +0100 Subject: [PATCH 535/677] Bump pypck to 0.8.1 (#133646) Co-authored-by: Robert Resch --- homeassistant/components/lcn/__init__.py | 39 ++++++++++++--------- homeassistant/components/lcn/config_flow.py | 5 ++- homeassistant/components/lcn/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/lcn/test_config_flow.py | 13 +++++-- tests/components/lcn/test_init.py | 31 +++++++++++----- 7 files changed, 63 insertions(+), 31 deletions(-) diff --git a/homeassistant/components/lcn/__init__.py b/homeassistant/components/lcn/__init__.py index 6dc6fb1ecc4..a10d08ad073 100644 --- a/homeassistant/components/lcn/__init__.py +++ b/homeassistant/components/lcn/__init__.py @@ -6,7 +6,14 @@ from functools import partial import logging import pypck -from pypck.connection import PchkConnectionManager +from pypck.connection import ( + PchkAuthenticationError, + PchkConnectionFailedError, + PchkConnectionManager, + PchkConnectionRefusedError, + PchkLcnNotConnectedError, + PchkLicenseError, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -20,6 +27,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv, device_registry as dr from homeassistant.helpers.typing import ConfigType @@ -81,24 +89,21 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b settings=settings, connection_id=config_entry.entry_id, ) + try: # establish connection to PCHK server await lcn_connection.async_connect(timeout=15) - except pypck.connection.PchkAuthenticationError: - _LOGGER.warning('Authentication on PCHK "%s" 
failed', config_entry.title) - return False - except pypck.connection.PchkLicenseError: - _LOGGER.warning( - ( - 'Maximum number of connections on PCHK "%s" was ' - "reached. An additional license key is required" - ), - config_entry.title, - ) - return False - except TimeoutError: - _LOGGER.warning('Connection to PCHK "%s" failed', config_entry.title) - return False + except ( + PchkAuthenticationError, + PchkLicenseError, + PchkConnectionRefusedError, + PchkConnectionFailedError, + PchkLcnNotConnectedError, + ) as ex: + await lcn_connection.async_close() + raise ConfigEntryNotReady( + f"Unable to connect to {config_entry.title}: {ex}" + ) from ex _LOGGER.debug('LCN connected to "%s"', config_entry.title) hass.data[DOMAIN][config_entry.entry_id] = { @@ -106,6 +111,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b DEVICE_CONNECTIONS: {}, ADD_ENTITIES_CALLBACKS: {}, } + # Update config_entry with LCN device serials await async_update_config_entry(hass, config_entry) @@ -121,6 +127,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b input_received = partial( async_host_input_received, hass, config_entry, device_registry ) + lcn_connection.register_for_inputs(input_received) return True diff --git a/homeassistant/components/lcn/config_flow.py b/homeassistant/components/lcn/config_flow.py index 008265e62ae..a1be32704f7 100644 --- a/homeassistant/components/lcn/config_flow.py +++ b/homeassistant/components/lcn/config_flow.py @@ -96,7 +96,10 @@ async def validate_connection(data: ConfigType) -> str | None: host_name, ) error = "license_error" - except (TimeoutError, ConnectionRefusedError): + except ( + pypck.connection.PchkConnectionFailedError, + pypck.connection.PchkConnectionRefusedError, + ): _LOGGER.warning('Connection to PCHK "%s" failed', host_name) error = "connection_refused" diff --git a/homeassistant/components/lcn/manifest.json b/homeassistant/components/lcn/manifest.json index 
695a35df871..f5eb1654588 100644 --- a/homeassistant/components/lcn/manifest.json +++ b/homeassistant/components/lcn/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/lcn", "iot_class": "local_push", "loggers": ["pypck"], - "requirements": ["pypck==0.7.24", "lcn-frontend==0.2.2"] + "requirements": ["pypck==0.8.1", "lcn-frontend==0.2.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 06cf4145b48..62bc0528605 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2174,7 +2174,7 @@ pypalazzetti==0.1.15 pypca==0.0.7 # homeassistant.components.lcn -pypck==0.7.24 +pypck==0.8.1 # homeassistant.components.pjlink pypjlink2==1.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3b9edd9e7c1..d4917df83b7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1767,7 +1767,7 @@ pyownet==0.10.0.post1 pypalazzetti==0.1.15 # homeassistant.components.lcn -pypck==0.7.24 +pypck==0.8.1 # homeassistant.components.pjlink pypjlink2==1.2.1 diff --git a/tests/components/lcn/test_config_flow.py b/tests/components/lcn/test_config_flow.py index b7967c247ec..478f2c0949e 100644 --- a/tests/components/lcn/test_config_flow.py +++ b/tests/components/lcn/test_config_flow.py @@ -2,7 +2,12 @@ from unittest.mock import patch -from pypck.connection import PchkAuthenticationError, PchkLicenseError +from pypck.connection import ( + PchkAuthenticationError, + PchkConnectionFailedError, + PchkConnectionRefusedError, + PchkLicenseError, +) import pytest from homeassistant import config_entries, data_entry_flow @@ -98,7 +103,8 @@ async def test_step_user_existing_host( [ (PchkAuthenticationError, {CONF_BASE: "authentication_error"}), (PchkLicenseError, {CONF_BASE: "license_error"}), - (TimeoutError, {CONF_BASE: "connection_refused"}), + (PchkConnectionFailedError, {CONF_BASE: "connection_refused"}), + (PchkConnectionRefusedError, {CONF_BASE: "connection_refused"}), ], ) async def 
test_step_user_error( @@ -149,7 +155,8 @@ async def test_step_reconfigure(hass: HomeAssistant, entry: MockConfigEntry) -> [ (PchkAuthenticationError, {CONF_BASE: "authentication_error"}), (PchkLicenseError, {CONF_BASE: "license_error"}), - (TimeoutError, {CONF_BASE: "connection_refused"}), + (PchkConnectionFailedError, {CONF_BASE: "connection_refused"}), + (PchkConnectionRefusedError, {CONF_BASE: "connection_refused"}), ], ) async def test_step_reconfigure_error( diff --git a/tests/components/lcn/test_init.py b/tests/components/lcn/test_init.py index 2327635e356..bffa91d14ef 100644 --- a/tests/components/lcn/test_init.py +++ b/tests/components/lcn/test_init.py @@ -2,7 +2,13 @@ from unittest.mock import Mock, patch -from pypck.connection import PchkAuthenticationError, PchkLicenseError +from pypck.connection import ( + PchkAuthenticationError, + PchkConnectionFailedError, + PchkConnectionRefusedError, + PchkLcnNotConnectedError, + PchkLicenseError, +) import pytest from homeassistant import config_entries @@ -84,21 +90,30 @@ async def test_async_setup_entry_update( @pytest.mark.parametrize( - "exception", [PchkAuthenticationError, PchkLicenseError, TimeoutError] + "exception", + [ + PchkAuthenticationError, + PchkLicenseError, + PchkConnectionRefusedError, + PchkConnectionFailedError, + PchkLcnNotConnectedError, + ], ) -async def test_async_setup_entry_raises_authentication_error( +async def test_async_setup_entry_fails( hass: HomeAssistant, entry: MockConfigEntry, exception: Exception ) -> None: - """Test that an authentication error is handled properly.""" - with patch( - "homeassistant.components.lcn.PchkConnectionManager.async_connect", - side_effect=exception, + """Test that an error is handled properly.""" + with ( + patch( + "homeassistant.components.lcn.PchkConnectionManager.async_connect", + side_effect=exception, + ), ): entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert entry.state is 
ConfigEntryState.SETUP_ERROR + assert entry.state is ConfigEntryState.SETUP_RETRY @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) From 233395c18113ad50084ef1ccc0c92ef414d02e29 Mon Sep 17 00:00:00 2001 From: elmurato <1382097+elmurato@users.noreply.github.com> Date: Fri, 20 Dec 2024 17:58:31 +0100 Subject: [PATCH 536/677] Add missing await in Minecraft Server (#133670) --- homeassistant/components/minecraft_server/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/minecraft_server/__init__.py b/homeassistant/components/minecraft_server/__init__.py index 8f016e2de00..f937c304471 100644 --- a/homeassistant/components/minecraft_server/__init__.py +++ b/homeassistant/components/minecraft_server/__init__.py @@ -43,7 +43,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Minecraft Server from a config entry.""" # Workaround to avoid blocking imports from dnspython (https://github.com/rthalley/dnspython/issues/1083) - hass.async_add_executor_job(load_dnspython_rdata_classes) + await hass.async_add_executor_job(load_dnspython_rdata_classes) # Create API instance. api = MinecraftServer( From 6ed345f7732889d5a5d4f5dde246c3069592374d Mon Sep 17 00:00:00 2001 From: "Steven B." 
<51370195+sdb9696@users.noreply.github.com> Date: Fri, 20 Dec 2024 17:20:24 +0000 Subject: [PATCH 537/677] Add check for client errors to stream component (#132866) --- homeassistant/components/stream/__init__.py | 111 ++++++++++++++++++++ tests/components/stream/test_init.py | 80 +++++++++++++- 2 files changed, 190 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/stream/__init__.py b/homeassistant/components/stream/__init__.py index 64c520150c2..1471db890d7 100644 --- a/homeassistant/components/stream/__init__.py +++ b/homeassistant/components/stream/__init__.py @@ -20,6 +20,7 @@ from __future__ import annotations import asyncio from collections.abc import Callable, Mapping import copy +from enum import IntEnum import logging import secrets import threading @@ -45,6 +46,7 @@ from .const import ( CONF_EXTRA_PART_WAIT_TIME, CONF_LL_HLS, CONF_PART_DURATION, + CONF_PREFER_TCP, CONF_RTSP_TRANSPORT, CONF_SEGMENT_DURATION, CONF_USE_WALLCLOCK_AS_TIMESTAMPS, @@ -74,6 +76,8 @@ from .diagnostics import Diagnostics from .hls import HlsStreamOutput, async_setup_hls if TYPE_CHECKING: + from av.container import InputContainer, OutputContainer + from homeassistant.components.camera import DynamicStreamSettings __all__ = [ @@ -95,6 +99,113 @@ __all__ = [ _LOGGER = logging.getLogger(__name__) +class StreamClientError(IntEnum): + """Enum for stream client errors.""" + + BadRequest = 400 + Unauthorized = 401 + Forbidden = 403 + NotFound = 404 + Other = 4 + + +class StreamOpenClientError(HomeAssistantError): + """Raised when client error received when trying to open a stream. 
+ + :param stream_client_error: The type of client error + """ + + def __init__( + self, *args: Any, stream_client_error: StreamClientError, **kwargs: Any + ) -> None: + self.stream_client_error = stream_client_error + super().__init__(*args, **kwargs) + + +async def _async_try_open_stream( + hass: HomeAssistant, source: str, pyav_options: dict[str, str] | None = None +) -> InputContainer | OutputContainer: + """Try to open a stream. + + Will raise StreamOpenClientError if an http client error is encountered. + """ + return await hass.loop.run_in_executor(None, _try_open_stream, source, pyav_options) + + +def _try_open_stream( + source: str, pyav_options: dict[str, str] | None = None +) -> InputContainer | OutputContainer: + """Try to open a stream. + + Will raise StreamOpenClientError if an http client error is encountered. + """ + import av # pylint: disable=import-outside-toplevel + + if pyav_options is None: + pyav_options = {} + + default_pyav_options = { + "rtsp_flags": CONF_PREFER_TCP, + "timeout": str(SOURCE_TIMEOUT), + } + + pyav_options = { + **default_pyav_options, + **pyav_options, + } + + try: + container = av.open(source, options=pyav_options, timeout=5) + + except av.HTTPBadRequestError as ex: + raise StreamOpenClientError( + stream_client_error=StreamClientError.BadRequest + ) from ex + + except av.HTTPUnauthorizedError as ex: + raise StreamOpenClientError( + stream_client_error=StreamClientError.Unauthorized + ) from ex + + except av.HTTPForbiddenError as ex: + raise StreamOpenClientError( + stream_client_error=StreamClientError.Forbidden + ) from ex + + except av.HTTPNotFoundError as ex: + raise StreamOpenClientError( + stream_client_error=StreamClientError.NotFound + ) from ex + + except av.HTTPOtherClientError as ex: + raise StreamOpenClientError(stream_client_error=StreamClientError.Other) from ex + + else: + return container + + +async def async_check_stream_client_error( + hass: HomeAssistant, source: str, pyav_options: dict[str, str] | None 
= None +) -> None: + """Check if a stream can be successfully opened. + + Raise StreamOpenClientError if an http client error is encountered. + """ + await hass.loop.run_in_executor( + None, _check_stream_client_error, source, pyav_options + ) + + +def _check_stream_client_error( + source: str, pyav_options: dict[str, str] | None = None +) -> None: + """Check if a stream can be successfully opened. + + Raise StreamOpenClientError if an http client error is encountered. + """ + _try_open_stream(source, pyav_options).close() + + def redact_credentials(url: str) -> str: """Redact credentials from string data.""" yurl = URL(url) diff --git a/tests/components/stream/test_init.py b/tests/components/stream/test_init.py index 1ae6f9e8931..5f9d305620d 100644 --- a/tests/components/stream/test_init.py +++ b/tests/components/stream/test_init.py @@ -1,11 +1,20 @@ """Test stream init.""" import logging +from unittest.mock import MagicMock, patch import av import pytest -from homeassistant.components.stream import __name__ as stream_name +from homeassistant.components.stream import ( + CONF_PREFER_TCP, + SOURCE_TIMEOUT, + StreamClientError, + StreamOpenClientError, + __name__ as stream_name, + _async_try_open_stream, + async_check_stream_client_error, +) from homeassistant.const import EVENT_LOGGING_CHANGED from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -53,3 +62,72 @@ async def test_log_levels( assert "SHOULD PASS" in caplog.text assert "SHOULD NOT PASS" not in caplog.text + + +async def test_check_open_stream_params(hass: HomeAssistant) -> None: + """Test check open stream params.""" + + container_mock = MagicMock() + source = "rtsp://foobar" + + with patch("av.open", return_value=container_mock) as open_mock: + await async_check_stream_client_error(hass, source) + + options = { + "rtsp_flags": CONF_PREFER_TCP, + "timeout": str(SOURCE_TIMEOUT), + } + open_mock.assert_called_once_with(source, options=options, timeout=5) + 
container_mock.close.assert_called_once() + + container_mock.reset_mock() + with patch("av.open", return_value=container_mock) as open_mock: + await async_check_stream_client_error(hass, source, {"foo": "bar"}) + + options = { + "rtsp_flags": CONF_PREFER_TCP, + "timeout": str(SOURCE_TIMEOUT), + "foo": "bar", + } + open_mock.assert_called_once_with(source, options=options, timeout=5) + container_mock.close.assert_called_once() + + +@pytest.mark.parametrize( + ("error", "enum_result"), + [ + pytest.param( + av.HTTPBadRequestError(400, ""), + StreamClientError.BadRequest, + id="BadRequest", + ), + pytest.param( + av.HTTPUnauthorizedError(401, ""), + StreamClientError.Unauthorized, + id="Unauthorized", + ), + pytest.param( + av.HTTPForbiddenError(403, ""), StreamClientError.Forbidden, id="Forbidden" + ), + pytest.param( + av.HTTPNotFoundError(404, ""), StreamClientError.NotFound, id="NotFound" + ), + pytest.param( + av.HTTPOtherClientError(408, ""), StreamClientError.Other, id="Other" + ), + ], +) +async def test_try_open_stream_error( + hass: HomeAssistant, error: av.HTTPClientError, enum_result: StreamClientError +) -> None: + """Test trying to open a stream.""" + oc_error: StreamOpenClientError | None = None + + with patch("av.open", side_effect=error): + try: + await _async_try_open_stream(hass, "rtsp://foobar") + except StreamOpenClientError as ex: + oc_error = ex + + assert oc_error + assert oc_error.stream_client_error is enum_result From 17f0c2489534ef9bc42513a1d90c921405740deb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Fri, 20 Dec 2024 17:24:57 +0000 Subject: [PATCH 538/677] Replace tests for Idasen Desk with parameterized test (#133672) --- .../components/idasen_desk/quality_scale.yaml | 1 - .../idasen_desk/test_config_flow.py | 134 ++---------------- 2 files changed, 11 insertions(+), 124 deletions(-) diff --git a/homeassistant/components/idasen_desk/quality_scale.yaml b/homeassistant/components/idasen_desk/quality_scale.yaml index 
4af2f489bd3..f91fd16176d 100644 --- a/homeassistant/components/idasen_desk/quality_scale.yaml +++ b/homeassistant/components/idasen_desk/quality_scale.yaml @@ -14,7 +14,6 @@ rules: status: todo comment: | - use mock_desk_api - - merge test_user_step_auth_failed, test_user_step_cannot_connect and test_user_step_unknown_exception. config-flow: done dependency-transparency: done docs-actions: diff --git a/tests/components/idasen_desk/test_config_flow.py b/tests/components/idasen_desk/test_config_flow.py index c27cdea58aa..be729545b88 100644 --- a/tests/components/idasen_desk/test_config_flow.py +++ b/tests/components/idasen_desk/test_config_flow.py @@ -89,9 +89,17 @@ async def test_user_step_no_new_devices_found(hass: HomeAssistant) -> None: assert result["reason"] == "no_devices_found" -@pytest.mark.parametrize("exception", [TimeoutError(), BleakError()]) +@pytest.mark.parametrize( + ("exception", "expected_error"), + [ + (TimeoutError, "cannot_connect"), + (BleakError, "cannot_connect"), + (AuthFailedError, "auth_failed"), + (RuntimeError, "unknown"), + ], +) async def test_user_step_cannot_connect( - hass: HomeAssistant, exception: Exception + hass: HomeAssistant, exception: Exception, expected_error: str ) -> None: """Test user step with a cannot connect error.""" with patch( @@ -122,7 +130,7 @@ async def test_user_step_cannot_connect( assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "user" - assert result2["errors"] == {"base": "cannot_connect"} + assert result2["errors"] == {"base": expected_error} with ( patch("homeassistant.components.idasen_desk.config_flow.Desk.connect"), @@ -149,126 +157,6 @@ async def test_user_step_cannot_connect( assert len(mock_setup_entry.mock_calls) == 1 -async def test_user_step_auth_failed(hass: HomeAssistant) -> None: - """Test user step with an auth failed error.""" - with patch( - "homeassistant.components.idasen_desk.config_flow.async_discovered_service_info", - return_value=[IDASEN_DISCOVERY_INFO], - 
): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - with ( - patch( - "homeassistant.components.idasen_desk.config_flow.Desk.connect", - side_effect=AuthFailedError, - ), - patch("homeassistant.components.idasen_desk.config_flow.Desk.disconnect"), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_ADDRESS: IDASEN_DISCOVERY_INFO.address, - }, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "user" - assert result2["errors"] == {"base": "auth_failed"} - - with ( - patch("homeassistant.components.idasen_desk.config_flow.Desk.connect"), - patch("homeassistant.components.idasen_desk.config_flow.Desk.disconnect"), - patch( - "homeassistant.components.idasen_desk.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - { - CONF_ADDRESS: IDASEN_DISCOVERY_INFO.address, - }, - ) - await hass.async_block_till_done() - - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == IDASEN_DISCOVERY_INFO.name - assert result3["data"] == { - CONF_ADDRESS: IDASEN_DISCOVERY_INFO.address, - } - assert result3["result"].unique_id == IDASEN_DISCOVERY_INFO.address - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_user_step_unknown_exception(hass: HomeAssistant) -> None: - """Test user step with an unknown exception.""" - with patch( - "homeassistant.components.idasen_desk.config_flow.async_discovered_service_info", - return_value=[NOT_IDASEN_DISCOVERY_INFO, IDASEN_DISCOVERY_INFO], - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - 
assert result["step_id"] == "user" - assert result["errors"] == {} - - with ( - patch( - "homeassistant.components.idasen_desk.config_flow.Desk.connect", - side_effect=RuntimeError, - ), - patch( - "homeassistant.components.idasen_desk.config_flow.Desk.disconnect", - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_ADDRESS: IDASEN_DISCOVERY_INFO.address, - }, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "user" - assert result2["errors"] == {"base": "unknown"} - - with ( - patch( - "homeassistant.components.idasen_desk.config_flow.Desk.connect", - ), - patch( - "homeassistant.components.idasen_desk.config_flow.Desk.disconnect", - ), - patch( - "homeassistant.components.idasen_desk.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - { - CONF_ADDRESS: IDASEN_DISCOVERY_INFO.address, - }, - ) - await hass.async_block_till_done() - - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == IDASEN_DISCOVERY_INFO.name - assert result3["data"] == { - CONF_ADDRESS: IDASEN_DISCOVERY_INFO.address, - } - assert result3["result"].unique_id == IDASEN_DISCOVERY_INFO.address - assert len(mock_setup_entry.mock_calls) == 1 - - async def test_bluetooth_step_success(hass: HomeAssistant) -> None: """Test bluetooth step success path.""" result = await hass.config_entries.flow.async_init( From a23b37114e25d87892f2ccd3b366701dacb548b1 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 20 Dec 2024 18:41:14 +0100 Subject: [PATCH 539/677] Improve recorder data migrator tests (#133628) --- .../recorder/test_migration_from_schema_32.py | 93 ++++++++++++++++--- 1 file changed, 80 insertions(+), 13 deletions(-) diff --git a/tests/components/recorder/test_migration_from_schema_32.py 
b/tests/components/recorder/test_migration_from_schema_32.py index e42cd22e952..3cc654c0fa1 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -13,6 +13,7 @@ import pytest from sqlalchemy import create_engine, inspect from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Session +from sqlalchemy.schema import Index from homeassistant.components import recorder from homeassistant.components.recorder import ( @@ -120,9 +121,11 @@ def db_schema_32(): @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_event_context_ids", [True]) +@pytest.mark.parametrize("indices_to_drop", [[], [("events", "ix_events_context_id")]]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_events_context_ids( async_test_recorder: RecorderInstanceGenerator, + indices_to_drop: list[tuple[str, str]], ) -> None: """Test we can migrate old uuid context ids and ulid context ids to binary format.""" importlib.import_module(SCHEMA_MODULE_32) @@ -237,6 +240,13 @@ async def test_migrate_events_context_ids( ] await _async_wait_migration_done(hass) + # Remove index + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + for table, index in indices_to_drop: + with session_scope(hass=hass) as session: + assert get_index_by_name(session, table, index) is not None + migration._drop_index(instance.get_session, table, index) + await hass.async_stop() await hass.async_block_till_done() @@ -266,7 +276,13 @@ async def test_migrate_events_context_ids( # Run again with new schema, let migration run async with async_test_home_assistant() as hass: - with freeze_time(now), instrument_migration(hass) as instrumented_migration: + with ( + freeze_time(now), + instrument_migration(hass) as instrumented_migration, + patch( + "sqlalchemy.schema.Index.create", autospec=True, wraps=Index.create + ) as 
wrapped_idx_create, + ): async with async_test_recorder( hass, wait_recorder=False, wait_recorder_setup=False ) as instance: @@ -297,6 +313,10 @@ async def test_migrate_events_context_ids( await hass.async_stop() await hass.async_block_till_done() + # Check the index we removed was recreated + index_names = [call[1][0].name for call in wrapped_idx_create.mock_calls] + assert index_names == [index for _, index in indices_to_drop] + old_uuid_context_id_event = events_by_type["old_uuid_context_id_event"] assert old_uuid_context_id_event["context_id"] is None assert old_uuid_context_id_event["context_user_id"] is None @@ -482,9 +502,11 @@ async def test_finish_migrate_events_context_ids( @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) +@pytest.mark.parametrize("indices_to_drop", [[], [("states", "ix_states_context_id")]]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_states_context_ids( async_test_recorder: RecorderInstanceGenerator, + indices_to_drop: list[tuple[str, str]], ) -> None: """Test we can migrate old uuid context ids and ulid context ids to binary format.""" importlib.import_module(SCHEMA_MODULE_32) @@ -577,6 +599,13 @@ async def test_migrate_states_context_ids( await async_wait_recording_done(hass) await _async_wait_migration_done(hass) + # Remove index + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + for table, index in indices_to_drop: + with session_scope(hass=hass) as session: + assert get_index_by_name(session, table, index) is not None + migration._drop_index(instance.get_session, table, index) + await hass.async_stop() await hass.async_block_till_done() @@ -606,7 +635,12 @@ async def test_migrate_states_context_ids( # Run again with new schema, let migration run async with async_test_home_assistant() as hass: - with instrument_migration(hass) as instrumented_migration: + with ( + 
instrument_migration(hass) as instrumented_migration, + patch( + "sqlalchemy.schema.Index.create", autospec=True, wraps=Index.create + ) as wrapped_idx_create, + ): async with async_test_recorder( hass, wait_recorder=False, wait_recorder_setup=False ) as instance: @@ -637,6 +671,10 @@ async def test_migrate_states_context_ids( await hass.async_stop() await hass.async_block_till_done() + # Check the index we removed was recreated + index_names = [call[1][0].name for call in wrapped_idx_create.mock_calls] + assert index_names == [index for _, index in indices_to_drop] + old_uuid_context_id = states_by_entity_id["state.old_uuid_context_id"] assert old_uuid_context_id["context_id"] is None assert old_uuid_context_id["context_user_id"] is None @@ -1049,9 +1087,13 @@ async def test_migrate_entity_ids( @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) +@pytest.mark.parametrize( + "indices_to_drop", [[], [("states", "ix_states_entity_id_last_updated_ts")]] +) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_post_migrate_entity_ids( async_test_recorder: RecorderInstanceGenerator, + indices_to_drop: list[tuple[str, str]], ) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" importlib.import_module(SCHEMA_MODULE_32) @@ -1096,6 +1138,13 @@ async def test_post_migrate_entity_ids( await async_wait_recording_done(hass) await _async_wait_migration_done(hass) + # Remove index + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + for table, index in indices_to_drop: + with session_scope(hass=hass) as session: + assert get_index_by_name(session, table, index) is not None + migration._drop_index(instance.get_session, table, index) + await hass.async_stop() await hass.async_block_till_done() @@ -1109,20 +1158,38 @@ async def test_post_migrate_entity_ids( return {state.state: state.entity_id for state in states} # Run again with new 
schema, let migration run - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): - instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + with patch( + "sqlalchemy.schema.Index.create", autospec=True, wraps=Index.create + ) as wrapped_idx_create: + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) - await hass.async_block_till_done() - await async_wait_recording_done(hass) - await async_wait_recording_done(hass) + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) - states_by_state = await instance.async_add_executor_job(_fetch_migrated_states) + states_by_state = await instance.async_add_executor_job( + _fetch_migrated_states + ) - await hass.async_stop() - await hass.async_block_till_done() + # Check the index which will be removed by the migrator no longer exists + with session_scope(hass=hass) as session: + assert ( + get_index_by_name( + session, "states", "ix_states_entity_id_last_updated_ts" + ) + is None + ) + + await hass.async_stop() + await hass.async_block_till_done() + + # Check the index we removed was recreated + index_names = [call[1][0].name for call in wrapped_idx_create.mock_calls] + assert index_names == [index for _, index in indices_to_drop] assert states_by_state["one_1"] is None assert states_by_state["two_2"] is None From c780933fa06db399c4b8ecfa22359cb44af9c2e9 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Fri, 20 Dec 2024 19:12:48 +0100 Subject: [PATCH 540/677] Reword invoke_pin action to avoid misunderstanding with "PIN" (#133665) * Reword invoke_pin action to avoid misunderstanding with "PIN" The previous mismatch between "PIN" and "pin" in the invoke_pin caused wrong translations as "PIN" was interpreted as the abbreviation for "Personal Identification Number". 
This commit fixes this by explaining "pin" as related to "pinning" content on the device. In addition the very "invoke" is replaced by "play" which every user and translator will understand immediately. Along with those changes this commit reverts my previous change to "PIN" in all strings that made things worse. * Use "Pin ID" for the field variable --- homeassistant/components/openhome/strings.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/openhome/strings.json b/homeassistant/components/openhome/strings.json index a757a2cb31c..f4b15e52e7c 100644 --- a/homeassistant/components/openhome/strings.json +++ b/homeassistant/components/openhome/strings.json @@ -1,12 +1,12 @@ { "services": { "invoke_pin": { - "name": "Invoke PIN", - "description": "Invokes a PIN on the specified device.", + "name": "Play pin", + "description": "Starts playing content pinned on the specified device.", "fields": { "pin": { - "name": "PIN", - "description": "Which PIN to invoke." + "name": "Pin ID", + "description": "ID of the pinned content." 
} } } From 2639bdbefdbb264e36ddca09c4d4915587a529d2 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 20 Dec 2024 20:21:37 +0100 Subject: [PATCH 541/677] Add parallel updates to Mealie (#133660) --- homeassistant/components/mealie/calendar.py | 2 ++ homeassistant/components/mealie/quality_scale.yaml | 2 +- homeassistant/components/mealie/sensor.py | 2 ++ homeassistant/components/mealie/todo.py | 1 + 4 files changed, 6 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/mealie/calendar.py b/homeassistant/components/mealie/calendar.py index 4c11c639c79..729bc16c6fd 100644 --- a/homeassistant/components/mealie/calendar.py +++ b/homeassistant/components/mealie/calendar.py @@ -13,6 +13,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .coordinator import MealieConfigEntry, MealieMealplanCoordinator from .entity import MealieEntity +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/mealie/quality_scale.yaml b/homeassistant/components/mealie/quality_scale.yaml index c72cde3672d..6a77152f615 100644 --- a/homeassistant/components/mealie/quality_scale.yaml +++ b/homeassistant/components/mealie/quality_scale.yaml @@ -33,7 +33,7 @@ rules: entity-unavailable: done integration-owner: done log-when-unavailable: done - parallel-updates: todo + parallel-updates: done reauthentication-flow: done test-coverage: status: todo diff --git a/homeassistant/components/mealie/sensor.py b/homeassistant/components/mealie/sensor.py index 141a28ecdab..e4b1655a9d1 100644 --- a/homeassistant/components/mealie/sensor.py +++ b/homeassistant/components/mealie/sensor.py @@ -17,6 +17,8 @@ from homeassistant.helpers.typing import StateType from .coordinator import MealieConfigEntry, MealieStatisticsCoordinator from .entity import MealieEntity +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class MealieStatisticsSensorEntityDescription(SensorEntityDescription): diff --git 
a/homeassistant/components/mealie/todo.py b/homeassistant/components/mealie/todo.py index 508b6aeb5e2..121e0bcbf10 100644 --- a/homeassistant/components/mealie/todo.py +++ b/homeassistant/components/mealie/todo.py @@ -20,6 +20,7 @@ from .const import DOMAIN from .coordinator import MealieConfigEntry, MealieShoppingListCoordinator from .entity import MealieEntity +PARALLEL_UPDATES = 0 TODO_STATUS_MAP = { False: TodoItemStatus.NEEDS_ACTION, True: TodoItemStatus.COMPLETED, From 8607ba884cda54c3426b1caa5051287e53de7a0e Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Fri, 20 Dec 2024 13:23:12 -0600 Subject: [PATCH 542/677] Bump intents to 2024.12.20 (#133676) --- homeassistant/components/conversation/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- script/hassfest/docker/Dockerfile | 2 +- tests/components/conversation/snapshots/test_http.ambr | 3 +++ tests/components/conversation/test_default_agent.py | 8 ++++++++ 7 files changed, 16 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 41c9a2d2691..a2ddd5f734c 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.9"] + "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.20"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index bd78ef8e3fb..9473871efdd 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -35,7 +35,7 @@ hass-nabucasa==0.87.0 hassil==2.0.5 home-assistant-bluetooth==1.13.0 home-assistant-frontend==20241127.8 
-home-assistant-intents==2024.12.9 +home-assistant-intents==2024.12.20 httpx==0.27.2 ifaddr==0.2.0 Jinja2==3.1.4 diff --git a/requirements_all.txt b/requirements_all.txt index 62bc0528605..d644c4388c4 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1137,7 +1137,7 @@ holidays==0.63 home-assistant-frontend==20241127.8 # homeassistant.components.conversation -home-assistant-intents==2024.12.9 +home-assistant-intents==2024.12.20 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d4917df83b7..a4a6f6d16c9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -966,7 +966,7 @@ holidays==0.63 home-assistant-frontend==20241127.8 # homeassistant.components.conversation -home-assistant-intents==2024.12.9 +home-assistant-intents==2024.12.20 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index 369beb538ed..bd2c9d328ac 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -23,7 +23,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.8,source=/uv,target=/bin/uv \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.3 \ - PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.9 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 + PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.20 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " diff --git a/tests/components/conversation/snapshots/test_http.ambr b/tests/components/conversation/snapshots/test_http.ambr index 9cebfd9abd1..ce3247fbbad 100644 --- a/tests/components/conversation/snapshots/test_http.ambr +++ 
b/tests/components/conversation/snapshots/test_http.ambr @@ -24,6 +24,7 @@ 'fr', 'gl', 'he', + 'hi', 'hr', 'hu', 'id', @@ -35,6 +36,7 @@ 'lt', 'lv', 'ml', + 'mn', 'ms', 'nb', 'nl', @@ -47,6 +49,7 @@ 'sl', 'sr', 'sv', + 'sw', 'te', 'th', 'tr', diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index 8df1647d18c..7e05476a349 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -3056,6 +3056,14 @@ async def test_entities_names_are_not_templates(hass: HomeAssistant) -> None: ("language", "light_name", "on_sentence", "off_sentence"), [ ("en", "test light", "turn on test light", "turn off test light"), + ("de", "Testlicht", "Schalte Testlicht ein", "Schalte Testlicht aus"), + ( + "fr", + "lumière de test", + "Allumer la lumière de test", + "Éteindre la lumière de test", + ), + ("nl", "testlicht", "Zet testlicht aan", "Zet testlicht uit"), ("zh-cn", "卧室灯", "打开卧室灯", "关闭卧室灯"), ("zh-hk", "睡房燈", "打開睡房燈", "關閉睡房燈"), ("zh-tw", "臥室檯燈", "打開臥室檯燈", "關臥室檯燈"), From b29be34f55145fcfd900ff5c1b5915f361b6a92a Mon Sep 17 00:00:00 2001 From: G Johansson Date: Fri, 20 Dec 2024 21:21:41 +0100 Subject: [PATCH 543/677] Allow Filter title to be translated (#128929) --- homeassistant/components/filter/strings.json | 1 + homeassistant/generated/integrations.json | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/filter/strings.json b/homeassistant/components/filter/strings.json index 461eed9aefa..2a83a05bb96 100644 --- a/homeassistant/components/filter/strings.json +++ b/homeassistant/components/filter/strings.json @@ -1,4 +1,5 @@ { + "title": "Filter", "services": { "reload": { "name": "[%key:common::action::reload%]", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 48fedd9c127..f037b8d7ce6 100644 --- a/homeassistant/generated/integrations.json +++ 
b/homeassistant/generated/integrations.json @@ -7368,7 +7368,6 @@ "iot_class": "calculated" }, "filter": { - "name": "Filter", "integration_type": "helper", "config_flow": false, "iot_class": "local_push" @@ -7499,6 +7498,7 @@ "emulated_roku", "energenie_power_sockets", "filesize", + "filter", "garages_amsterdam", "generic", "generic_hygrostat", From 1e420f16f7ea141487b190f588aba5fdc0e20d2f Mon Sep 17 00:00:00 2001 From: Luke Lashley Date: Fri, 20 Dec 2024 16:01:56 -0500 Subject: [PATCH 544/677] Update Roborock to 2.8.4 (#133680) --- homeassistant/components/roborock/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/roborock/manifest.json b/homeassistant/components/roborock/manifest.json index 69d867aa164..bb89ecedbe3 100644 --- a/homeassistant/components/roborock/manifest.json +++ b/homeassistant/components/roborock/manifest.json @@ -7,7 +7,7 @@ "iot_class": "local_polling", "loggers": ["roborock"], "requirements": [ - "python-roborock==2.8.1", + "python-roborock==2.8.4", "vacuum-map-parser-roborock==0.1.2" ] } diff --git a/requirements_all.txt b/requirements_all.txt index d644c4388c4..75357a7d22e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2415,7 +2415,7 @@ python-rabbitair==0.0.8 python-ripple-api==0.0.3 # homeassistant.components.roborock -python-roborock==2.8.1 +python-roborock==2.8.4 # homeassistant.components.smarttub python-smarttub==0.0.38 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a4a6f6d16c9..88b30377514 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1945,7 +1945,7 @@ python-picnic-api==1.1.0 python-rabbitair==0.0.8 # homeassistant.components.roborock -python-roborock==2.8.1 +python-roborock==2.8.4 # homeassistant.components.smarttub python-smarttub==0.0.38 From 9a0035e09012c5944f2eb72fcd3c24edd104a800 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: 
Fri, 20 Dec 2024 23:45:54 +0100 Subject: [PATCH 545/677] Fix Mealie test coverage (#133659) --- .../components/mealie/quality_scale.yaml | 4 +- homeassistant/components/mealie/todo.py | 32 +-- tests/components/mealie/test_calendar.py | 20 +- tests/components/mealie/test_todo.py | 252 +++++++++++++++--- 4 files changed, 241 insertions(+), 67 deletions(-) diff --git a/homeassistant/components/mealie/quality_scale.yaml b/homeassistant/components/mealie/quality_scale.yaml index 6a77152f615..738c5b99d91 100644 --- a/homeassistant/components/mealie/quality_scale.yaml +++ b/homeassistant/components/mealie/quality_scale.yaml @@ -35,9 +35,7 @@ rules: log-when-unavailable: done parallel-updates: done reauthentication-flow: done - test-coverage: - status: todo - comment: Platform missing tests + test-coverage: done # Gold devices: done diagnostics: done diff --git a/homeassistant/components/mealie/todo.py b/homeassistant/components/mealie/todo.py index 121e0bcbf10..be04b00113e 100644 --- a/homeassistant/components/mealie/todo.py +++ b/homeassistant/components/mealie/todo.py @@ -148,29 +148,19 @@ class MealieShoppingListTodoListEntity(MealieEntity, TodoListEntity): """Update an item on the list.""" list_items = self.shopping_items - for items in list_items: - if items.item_id == item.uid: - position = items.position - break - list_item: ShoppingItem | None = next( (x for x in list_items if x.item_id == item.uid), None ) + assert list_item is not None + position = list_item.position - if not list_item: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="item_not_found_error", - translation_placeholders={"shopping_list_item": item.uid or ""}, - ) - - udpdate_shopping_item = MutateShoppingItem( + update_shopping_item = MutateShoppingItem( item_id=list_item.item_id, list_id=list_item.list_id, note=list_item.note, display=list_item.display, checked=item.status == TodoItemStatus.COMPLETED, - position=list_item.position, + position=position, 
is_food=list_item.is_food, disable_amount=list_item.disable_amount, quantity=list_item.quantity, @@ -182,16 +172,16 @@ class MealieShoppingListTodoListEntity(MealieEntity, TodoListEntity): stripped_item_summary = item.summary.strip() if item.summary else item.summary if list_item.display.strip() != stripped_item_summary: - udpdate_shopping_item.note = stripped_item_summary - udpdate_shopping_item.position = position - udpdate_shopping_item.is_food = False - udpdate_shopping_item.food_id = None - udpdate_shopping_item.quantity = 0.0 - udpdate_shopping_item.checked = item.status == TodoItemStatus.COMPLETED + update_shopping_item.note = stripped_item_summary + update_shopping_item.position = position + update_shopping_item.is_food = False + update_shopping_item.food_id = None + update_shopping_item.quantity = 0.0 + update_shopping_item.checked = item.status == TodoItemStatus.COMPLETED try: await self.coordinator.client.update_shopping_item( - list_item.item_id, udpdate_shopping_item + list_item.item_id, update_shopping_item ) except MealieError as exception: raise HomeAssistantError( diff --git a/tests/components/mealie/test_calendar.py b/tests/components/mealie/test_calendar.py index d11fe5d2354..cca4fcca673 100644 --- a/tests/components/mealie/test_calendar.py +++ b/tests/components/mealie/test_calendar.py @@ -4,9 +4,10 @@ from datetime import date from http import HTTPStatus from unittest.mock import AsyncMock, patch +from aiomealie import MealplanResponse from syrupy.assertion import SnapshotAssertion -from homeassistant.const import Platform +from homeassistant.const import STATE_OFF, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -40,13 +41,28 @@ async def test_entities( mock_mealie_client: AsyncMock, mock_config_entry: MockConfigEntry, ) -> None: - """Test the API returns the calendar.""" + """Test the calendar entities.""" with patch("homeassistant.components.mealie.PLATFORMS", 
[Platform.CALENDAR]): await setup_integration(hass, mock_config_entry) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) +async def test_no_meal_planned( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the calendar handles no meal planned.""" + mock_mealie_client.get_mealplans.return_value = MealplanResponse([]) + + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("calendar.mealie_dinner").state == STATE_OFF + + async def test_api_events( hass: HomeAssistant, snapshot: SnapshotAssertion, diff --git a/tests/components/mealie/test_todo.py b/tests/components/mealie/test_todo.py index 920cfc47397..e7942887099 100644 --- a/tests/components/mealie/test_todo.py +++ b/tests/components/mealie/test_todo.py @@ -1,9 +1,9 @@ """Tests for the Mealie todo.""" from datetime import timedelta -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, call, patch -from aiomealie import MealieError, ShoppingListsResponse +from aiomealie import MealieError, MutateShoppingItem, ShoppingListsResponse from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion @@ -18,7 +18,7 @@ from homeassistant.components.todo import ( ) from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er from . 
import setup_integration @@ -29,6 +29,7 @@ from tests.common import ( load_fixture, snapshot_platform, ) +from tests.typing import WebSocketGenerator async def test_entities( @@ -45,23 +46,38 @@ async def test_entities( await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) -async def test_add_todo_list_item( +@pytest.mark.parametrize( + ("service", "data", "method"), + [ + (TodoServices.ADD_ITEM, {ATTR_ITEM: "Soda"}, "add_shopping_item"), + ( + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "aubergine", ATTR_RENAME: "Eggplant", ATTR_STATUS: "completed"}, + "update_shopping_item", + ), + (TodoServices.REMOVE_ITEM, {ATTR_ITEM: "aubergine"}, "delete_shopping_item"), + ], +) +async def test_todo_actions( hass: HomeAssistant, mock_mealie_client: AsyncMock, mock_config_entry: MockConfigEntry, + service: str, + data: dict[str, str], + method: str, ) -> None: - """Test for adding a To-do Item.""" + """Test todo actions.""" await setup_integration(hass, mock_config_entry) await hass.services.async_call( TODO_DOMAIN, - TodoServices.ADD_ITEM, - {ATTR_ITEM: "Soda"}, + service, + data, target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, blocking=True, ) - mock_mealie_client.add_shopping_item.assert_called_once() + getattr(mock_mealie_client, method).assert_called_once() async def test_add_todo_list_item_error( @@ -74,7 +90,9 @@ async def test_add_todo_list_item_error( mock_mealie_client.add_shopping_item.side_effect = MealieError - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, match="An error occurred adding an item to Supermarket" + ): await hass.services.async_call( TODO_DOMAIN, TodoServices.ADD_ITEM, @@ -84,25 +102,6 @@ async def test_add_todo_list_item_error( ) -async def test_update_todo_list_item( - hass: HomeAssistant, - mock_mealie_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test for updating a To-do Item.""" - await setup_integration(hass, mock_config_entry) - - await 
hass.services.async_call( - TODO_DOMAIN, - TodoServices.UPDATE_ITEM, - {ATTR_ITEM: "aubergine", ATTR_RENAME: "Eggplant", ATTR_STATUS: "completed"}, - target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, - blocking=True, - ) - - mock_mealie_client.update_shopping_item.assert_called_once() - - async def test_update_todo_list_item_error( hass: HomeAssistant, mock_mealie_client: AsyncMock, @@ -113,7 +112,9 @@ async def test_update_todo_list_item_error( mock_mealie_client.update_shopping_item.side_effect = MealieError - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, match="An error occurred updating an item in Supermarket" + ): await hass.services.async_call( TODO_DOMAIN, TodoServices.UPDATE_ITEM, @@ -123,23 +124,24 @@ async def test_update_todo_list_item_error( ) -async def test_delete_todo_list_item( +async def test_update_non_existent_item( hass: HomeAssistant, mock_mealie_client: AsyncMock, mock_config_entry: MockConfigEntry, ) -> None: - """Test for deleting a To-do Item.""" + """Test for updating a non-existent To-do Item.""" await setup_integration(hass, mock_config_entry) - await hass.services.async_call( - TODO_DOMAIN, - TodoServices.REMOVE_ITEM, - {ATTR_ITEM: "aubergine"}, - target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, - blocking=True, - ) - - mock_mealie_client.delete_shopping_item.assert_called_once() + with pytest.raises( + ServiceValidationError, match="Unable to find to-do list item: eggplant" + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "eggplant", ATTR_RENAME: "Aubergine", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, + blocking=True, + ) async def test_delete_todo_list_item_error( @@ -153,7 +155,9 @@ async def test_delete_todo_list_item_error( mock_mealie_client.delete_shopping_item = AsyncMock() mock_mealie_client.delete_shopping_item.side_effect = MealieError - with pytest.raises(HomeAssistantError): + with pytest.raises( + 
HomeAssistantError, match="An error occurred deleting an item in Supermarket" + ): await hass.services.async_call( TODO_DOMAIN, TodoServices.REMOVE_ITEM, @@ -163,6 +167,172 @@ async def test_delete_todo_list_item_error( ) +async def test_moving_todo_item( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test for moving a To-do Item to place.""" + await setup_integration(hass, mock_config_entry) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "todo/item/move", + "entity_id": "todo.mealie_supermarket", + "uid": "f45430f7-3edf-45a9-a50f-73bb375090be", + "previous_uid": "84d8fd74-8eb0-402e-84b6-71f251bfb7cc", + } + ) + resp = await client.receive_json() + assert resp.get("id") == 1 + assert resp.get("success") + assert resp.get("result") is None + + assert mock_mealie_client.update_shopping_item.call_count == 3 + calls = mock_mealie_client.update_shopping_item.mock_calls + + assert calls[0] == call( + "84d8fd74-8eb0-402e-84b6-71f251bfb7cc", + MutateShoppingItem( + item_id="84d8fd74-8eb0-402e-84b6-71f251bfb7cc", + list_id="9ce096fe-ded2-4077-877d-78ba450ab13e", + note="", + display=None, + checked=False, + position=0, + is_food=True, + disable_amount=None, + quantity=1.0, + label_id=None, + food_id="09322430-d24c-4b1a-abb6-22b6ed3a88f5", + unit_id="7bf539d4-fc78-48bc-b48e-c35ccccec34a", + ), + ) + + assert calls[1] == call( + "f45430f7-3edf-45a9-a50f-73bb375090be", + MutateShoppingItem( + item_id="f45430f7-3edf-45a9-a50f-73bb375090be", + list_id="9ce096fe-ded2-4077-877d-78ba450ab13e", + note="Apples", + display=None, + checked=False, + position=1, + is_food=False, + disable_amount=None, + quantity=2.0, + label_id=None, + food_id=None, + unit_id=None, + ), + ) + + assert calls[2] == call( + "69913b9a-7c75-4935-abec-297cf7483f88", + MutateShoppingItem( + item_id="69913b9a-7c75-4935-abec-297cf7483f88", + 
list_id="9ce096fe-ded2-4077-877d-78ba450ab13e", + note="", + display=None, + checked=False, + position=2, + is_food=True, + disable_amount=None, + quantity=0.0, + label_id=None, + food_id="96801494-4e26-4148-849a-8155deb76327", + unit_id=None, + ), + ) + + +async def test_not_moving_todo_item( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test for moving a To-do Item to the same place.""" + await setup_integration(hass, mock_config_entry) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "todo/item/move", + "entity_id": "todo.mealie_supermarket", + "uid": "f45430f7-3edf-45a9-a50f-73bb375090be", + "previous_uid": "f45430f7-3edf-45a9-a50f-73bb375090be", + } + ) + resp = await client.receive_json() + assert resp.get("id") == 1 + assert resp.get("success") + assert resp.get("result") is None + + assert mock_mealie_client.update_shopping_item.call_count == 0 + + +async def test_moving_todo_item_invalid_uid( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test for moving a To-do Item to place with invalid UID.""" + await setup_integration(hass, mock_config_entry) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "todo/item/move", + "entity_id": "todo.mealie_supermarket", + "uid": "cheese", + } + ) + resp = await client.receive_json() + assert resp.get("id") == 1 + assert resp.get("success") is False + assert resp.get("result") is None + assert resp["error"]["code"] == "failed" + assert resp["error"]["message"] == "Item cheese not found" + + assert mock_mealie_client.update_shopping_item.call_count == 0 + + +async def test_moving_todo_item_invalid_previous_uid( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> 
None: + """Test for moving a To-do Item to place with invalid previous UID.""" + await setup_integration(hass, mock_config_entry) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "todo/item/move", + "entity_id": "todo.mealie_supermarket", + "uid": "f45430f7-3edf-45a9-a50f-73bb375090be", + "previous_uid": "cheese", + } + ) + resp = await client.receive_json() + assert resp.get("id") == 1 + assert resp.get("success") is False + assert resp.get("result") is None + assert resp["error"]["code"] == "failed" + assert resp["error"]["message"] == "Item cheese not found" + + assert mock_mealie_client.update_shopping_item.call_count == 0 + + async def test_runtime_management( hass: HomeAssistant, mock_mealie_client: AsyncMock, From b6819cbff32033ce87d9560526c4dfb511370f9d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 20 Dec 2024 13:13:21 -1000 Subject: [PATCH 546/677] Bump PySwitchbot to 0.55.2 (#133690) changelog: https://github.com/sblibs/pySwitchbot/compare/0.54.0...0.55.2 --- homeassistant/components/switchbot/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/switchbot/manifest.json b/homeassistant/components/switchbot/manifest.json index 5a328650aca..5c91a6e20a5 100644 --- a/homeassistant/components/switchbot/manifest.json +++ b/homeassistant/components/switchbot/manifest.json @@ -39,5 +39,5 @@ "documentation": "https://www.home-assistant.io/integrations/switchbot", "iot_class": "local_push", "loggers": ["switchbot"], - "requirements": ["PySwitchbot==0.54.0"] + "requirements": ["PySwitchbot==0.55.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 75357a7d22e..2cae5d93bf8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -84,7 +84,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.54.0 +PySwitchbot==0.55.2 # 
homeassistant.components.switchmate PySwitchmate==0.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 88b30377514..a286cc9a0da 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -81,7 +81,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.54.0 +PySwitchbot==0.55.2 # homeassistant.components.syncthru PySyncThru==0.7.10 From 861d9b334160c865255c43d77787ccfaac43f5cd Mon Sep 17 00:00:00 2001 From: greyeee <62752780+greyeee@users.noreply.github.com> Date: Sat, 21 Dec 2024 07:49:30 +0800 Subject: [PATCH 547/677] Add initial support for SwitchBot relay switch (#130863) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Support relay switch * 更新下版本 * add test case * change to async_abort * Upgrade PySwitchbot to 0.53.2 * change unit to volt * upgrade pySwitchbot dependency * bump lib, will be split into a seperate PR after testing is finished * dry * dry * dry * dry * dry * dry * dry * update tests * fixes * fixes * cleanups * fixes * fixes * fixes * bump again --------- Co-authored-by: J. 
Nick Koston Co-authored-by: Joost Lekkerkerker --- .../components/switchbot/__init__.py | 9 +- .../components/switchbot/config_flow.py | 43 +-- homeassistant/components/switchbot/const.py | 21 +- homeassistant/components/switchbot/sensor.py | 14 + .../components/switchbot/strings.json | 14 +- tests/components/switchbot/__init__.py | 20 ++ .../components/switchbot/test_config_flow.py | 270 ++++++++++++++++-- tests/components/switchbot/test_sensor.py | 50 +++- 8 files changed, 379 insertions(+), 62 deletions(-) diff --git a/homeassistant/components/switchbot/__init__.py b/homeassistant/components/switchbot/__init__.py index c2b4b2ad736..522258c2a55 100644 --- a/homeassistant/components/switchbot/__init__.py +++ b/homeassistant/components/switchbot/__init__.py @@ -24,6 +24,7 @@ from .const import ( CONF_RETRY_COUNT, CONNECTABLE_SUPPORTED_MODEL_TYPES, DEFAULT_RETRY_COUNT, + ENCRYPTED_MODELS, HASS_SENSOR_TYPE_TO_SWITCHBOT_MODEL, SupportedModels, ) @@ -61,6 +62,8 @@ PLATFORMS_BY_TYPE = { Platform.SENSOR, ], SupportedModels.HUB2.value: [Platform.SENSOR], + SupportedModels.RELAY_SWITCH_1PM.value: [Platform.SWITCH, Platform.SENSOR], + SupportedModels.RELAY_SWITCH_1.value: [Platform.SWITCH], } CLASS_BY_DEVICE = { SupportedModels.CEILING_LIGHT.value: switchbot.SwitchbotCeilingLight, @@ -73,6 +76,8 @@ CLASS_BY_DEVICE = { SupportedModels.LOCK.value: switchbot.SwitchbotLock, SupportedModels.LOCK_PRO.value: switchbot.SwitchbotLock, SupportedModels.BLIND_TILT.value: switchbot.SwitchbotBlindTilt, + SupportedModels.RELAY_SWITCH_1PM.value: switchbot.SwitchbotRelaySwitch, + SupportedModels.RELAY_SWITCH_1.value: switchbot.SwitchbotRelaySwitch, } @@ -116,9 +121,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: SwitchbotConfigEntry) -> ) cls = CLASS_BY_DEVICE.get(sensor_type, switchbot.SwitchbotDevice) - if cls is switchbot.SwitchbotLock: + if switchbot_model in ENCRYPTED_MODELS: try: - device = switchbot.SwitchbotLock( + device = cls( device=ble_device, 
key_id=entry.data.get(CONF_KEY_ID), encryption_key=entry.data.get(CONF_ENCRYPTION_KEY), diff --git a/homeassistant/components/switchbot/config_flow.py b/homeassistant/components/switchbot/config_flow.py index a0e45169770..fc2d9f491ac 100644 --- a/homeassistant/components/switchbot/config_flow.py +++ b/homeassistant/components/switchbot/config_flow.py @@ -10,7 +10,7 @@ from switchbot import ( SwitchBotAdvertisement, SwitchbotApiError, SwitchbotAuthenticationError, - SwitchbotLock, + SwitchbotModel, parse_advertisement_data, ) import voluptuous as vol @@ -44,8 +44,9 @@ from .const import ( DEFAULT_LOCK_NIGHTLATCH, DEFAULT_RETRY_COUNT, DOMAIN, + ENCRYPTED_MODELS, + ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS, NON_CONNECTABLE_SUPPORTED_MODEL_TYPES, - SUPPORTED_LOCK_MODELS, SUPPORTED_MODEL_TYPES, SupportedModels, ) @@ -112,8 +113,8 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): "name": data["modelFriendlyName"], "address": short_address(discovery_info.address), } - if model_name in SUPPORTED_LOCK_MODELS: - return await self.async_step_lock_choose_method() + if model_name in ENCRYPTED_MODELS: + return await self.async_step_encrypted_choose_method() if self._discovered_adv.data["isEncrypted"]: return await self.async_step_password() return await self.async_step_confirm() @@ -171,7 +172,7 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): }, ) - async def async_step_lock_auth( + async def async_step_encrypted_auth( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the SwitchBot API auth step.""" @@ -179,8 +180,10 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): assert self._discovered_adv is not None description_placeholders = {} if user_input is not None: + model: SwitchbotModel = self._discovered_adv.data["modelName"] + cls = ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS[model] try: - key_details = await SwitchbotLock.async_retrieve_encryption_key( + key_details = await cls.async_retrieve_encryption_key( 
async_get_clientsession(self.hass), self._discovered_adv.address, user_input[CONF_USERNAME], @@ -198,11 +201,11 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): errors = {"base": "auth_failed"} description_placeholders = {"error_detail": str(ex)} else: - return await self.async_step_lock_key(key_details) + return await self.async_step_encrypted_key(key_details) user_input = user_input or {} return self.async_show_form( - step_id="lock_auth", + step_id="encrypted_auth", errors=errors, data_schema=vol.Schema( { @@ -218,32 +221,34 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): }, ) - async def async_step_lock_choose_method( + async def async_step_encrypted_choose_method( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the SwitchBot API chose method step.""" assert self._discovered_adv is not None return self.async_show_menu( - step_id="lock_choose_method", - menu_options=["lock_auth", "lock_key"], + step_id="encrypted_choose_method", + menu_options=["encrypted_auth", "encrypted_key"], description_placeholders={ "name": name_from_discovery(self._discovered_adv), }, ) - async def async_step_lock_key( + async def async_step_encrypted_key( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the encryption key step.""" errors = {} assert self._discovered_adv is not None if user_input is not None: - if not await SwitchbotLock.verify_encryption_key( + model: SwitchbotModel = self._discovered_adv.data["modelName"] + cls = ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS[model] + if not await cls.verify_encryption_key( self._discovered_adv.device, user_input[CONF_KEY_ID], user_input[CONF_ENCRYPTION_KEY], - model=self._discovered_adv.data["modelName"], + model=model, ): errors = { "base": "encryption_key_invalid", @@ -252,7 +257,7 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): return await self._async_create_entry_from_discovery(user_input) return self.async_show_form( - step_id="lock_key", + 
step_id="encrypted_key", errors=errors, data_schema=vol.Schema( { @@ -309,8 +314,8 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: device_adv = self._discovered_advs[user_input[CONF_ADDRESS]] await self._async_set_device(device_adv) - if device_adv.data.get("modelName") in SUPPORTED_LOCK_MODELS: - return await self.async_step_lock_choose_method() + if device_adv.data.get("modelName") in ENCRYPTED_MODELS: + return await self.async_step_encrypted_choose_method() if device_adv.data["isEncrypted"]: return await self.async_step_password() return await self._async_create_entry_from_discovery(user_input) @@ -321,8 +326,8 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): # or simply confirm it device_adv = list(self._discovered_advs.values())[0] await self._async_set_device(device_adv) - if device_adv.data.get("modelName") in SUPPORTED_LOCK_MODELS: - return await self.async_step_lock_choose_method() + if device_adv.data.get("modelName") in ENCRYPTED_MODELS: + return await self.async_step_encrypted_choose_method() if device_adv.data["isEncrypted"]: return await self.async_step_password() return await self.async_step_confirm() diff --git a/homeassistant/components/switchbot/const.py b/homeassistant/components/switchbot/const.py index b8cf4e8e1ab..383fd6b03b6 100644 --- a/homeassistant/components/switchbot/const.py +++ b/homeassistant/components/switchbot/const.py @@ -2,6 +2,7 @@ from enum import StrEnum +import switchbot from switchbot import SwitchbotModel DOMAIN = "switchbot" @@ -30,6 +31,8 @@ class SupportedModels(StrEnum): LOCK_PRO = "lock_pro" BLIND_TILT = "blind_tilt" HUB2 = "hub2" + RELAY_SWITCH_1PM = "relay_switch_1pm" + RELAY_SWITCH_1 = "relay_switch_1" CONNECTABLE_SUPPORTED_MODEL_TYPES = { @@ -44,6 +47,8 @@ CONNECTABLE_SUPPORTED_MODEL_TYPES = { SwitchbotModel.LOCK_PRO: SupportedModels.LOCK_PRO, SwitchbotModel.BLIND_TILT: SupportedModels.BLIND_TILT, SwitchbotModel.HUB2: SupportedModels.HUB2, + 
SwitchbotModel.RELAY_SWITCH_1PM: SupportedModels.RELAY_SWITCH_1PM, + SwitchbotModel.RELAY_SWITCH_1: SupportedModels.RELAY_SWITCH_1, } NON_CONNECTABLE_SUPPORTED_MODEL_TYPES = { @@ -59,7 +64,21 @@ SUPPORTED_MODEL_TYPES = ( CONNECTABLE_SUPPORTED_MODEL_TYPES | NON_CONNECTABLE_SUPPORTED_MODEL_TYPES ) -SUPPORTED_LOCK_MODELS = {SwitchbotModel.LOCK, SwitchbotModel.LOCK_PRO} +ENCRYPTED_MODELS = { + SwitchbotModel.RELAY_SWITCH_1, + SwitchbotModel.RELAY_SWITCH_1PM, + SwitchbotModel.LOCK, + SwitchbotModel.LOCK_PRO, +} + +ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS: dict[ + SwitchbotModel, switchbot.SwitchbotEncryptedDevice +] = { + SwitchbotModel.LOCK: switchbot.SwitchbotLock, + SwitchbotModel.LOCK_PRO: switchbot.SwitchbotLock, + SwitchbotModel.RELAY_SWITCH_1PM: switchbot.SwitchbotRelaySwitch, + SwitchbotModel.RELAY_SWITCH_1: switchbot.SwitchbotRelaySwitch, +} HASS_SENSOR_TYPE_TO_SWITCHBOT_MODEL = { str(v): k for k, v in SUPPORTED_MODEL_TYPES.items() diff --git a/homeassistant/components/switchbot/sensor.py b/homeassistant/components/switchbot/sensor.py index fd3de3e31e9..9787521a5e9 100644 --- a/homeassistant/components/switchbot/sensor.py +++ b/homeassistant/components/switchbot/sensor.py @@ -14,6 +14,8 @@ from homeassistant.const import ( PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, + UnitOfElectricCurrent, + UnitOfElectricPotential, UnitOfPower, UnitOfTemperature, ) @@ -82,6 +84,18 @@ SENSOR_TYPES: dict[str, SensorEntityDescription] = { state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.POWER, ), + "current": SensorEntityDescription( + key="current", + native_unit_of_measurement=UnitOfElectricCurrent.MILLIAMPERE, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.CURRENT, + ), + "voltage": SensorEntityDescription( + key="voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.VOLTAGE, + ), } diff --git 
a/homeassistant/components/switchbot/strings.json b/homeassistant/components/switchbot/strings.json index 80ca32d4826..2a5ddaa0cba 100644 --- a/homeassistant/components/switchbot/strings.json +++ b/homeassistant/components/switchbot/strings.json @@ -16,25 +16,25 @@ "password": "[%key:common::config_flow::data::password%]" } }, - "lock_key": { + "encrypted_key": { "description": "The {name} device requires encryption key, details on how to obtain it can be found in the documentation.", "data": { "key_id": "Key ID", "encryption_key": "Encryption key" } }, - "lock_auth": { - "description": "Please provide your SwitchBot app username and password. This data won't be saved and only used to retrieve your locks encryption key. Usernames and passwords are case sensitive.", + "encrypted_auth": { + "description": "Please provide your SwitchBot app username and password. This data won't be saved and only used to retrieve your device's encryption key. Usernames and passwords are case sensitive.", "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" } }, - "lock_choose_method": { - "description": "A SwitchBot lock can be set up in Home Assistant in two different ways.\n\nYou can enter the key id and encryption key yourself, or Home Assistant can import them from your SwitchBot account.", + "encrypted_choose_method": { + "description": "An encrypted SwitchBot device can be set up in Home Assistant in two different ways.\n\nYou can enter the key id and encryption key yourself, or Home Assistant can import them from your SwitchBot account.", "menu_options": { - "lock_auth": "SwitchBot account (recommended)", - "lock_key": "Enter lock encryption key manually" + "encrypted_auth": "SwitchBot account (recommended)", + "encrypted_key": "Enter encryption key manually" } } }, diff --git a/tests/components/switchbot/__init__.py b/tests/components/switchbot/__init__.py index bd3985ff062..c5ecebf21b3 100644 --- 
a/tests/components/switchbot/__init__.py +++ b/tests/components/switchbot/__init__.py @@ -230,3 +230,23 @@ WOMETERTHPC_SERVICE_INFO = BluetoothServiceInfoBleak( connectable=True, tx_power=-127, ) + +WORELAY_SWITCH_1PM_SERVICE_INFO = BluetoothServiceInfoBleak( + name="W1080000", + manufacturer_data={2409: b"$X|\x0866G\x81\x00\x00\x001\x00\x00\x00\x00"}, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"<\x00\x00\x00"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + address="AA:BB:CC:DD:EE:FF", + rssi=-60, + source="local", + advertisement=generate_advertisement_data( + local_name="W1080000", + manufacturer_data={2409: b"$X|\x0866G\x81\x00\x00\x001\x00\x00\x00\x00"}, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"<\x00\x00\x00"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + ), + device=generate_ble_device("AA:BB:CC:DD:EE:FF", "W1080000"), + time=0, + connectable=True, + tx_power=-127, +) diff --git a/tests/components/switchbot/test_config_flow.py b/tests/components/switchbot/test_config_flow.py index b0fba2a5f18..3caa2a1f0df 100644 --- a/tests/components/switchbot/test_config_flow.py +++ b/tests/components/switchbot/test_config_flow.py @@ -30,6 +30,7 @@ from . 
import ( WOHAND_SERVICE_INFO, WOHAND_SERVICE_INFO_NOT_CONNECTABLE, WOLOCK_SERVICE_INFO, + WORELAY_SWITCH_1PM_SERVICE_INFO, WOSENSORTH_SERVICE_INFO, init_integration, patch_async_setup_entry, @@ -95,7 +96,7 @@ async def test_bluetooth_discovery_requires_password(hass: HomeAssistant) -> Non assert len(mock_setup_entry.mock_calls) == 1 -async def test_bluetooth_discovery_lock_key(hass: HomeAssistant) -> None: +async def test_bluetooth_discovery_encrypted_key(hass: HomeAssistant) -> None: """Test discovery via bluetooth with a lock.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -103,18 +104,18 @@ async def test_bluetooth_discovery_lock_key(hass: HomeAssistant) -> None: data=WOLOCK_SERVICE_INFO, ) assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "lock_choose_method" + assert result["step_id"] == "encrypted_choose_method" result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={"next_step_id": "lock_key"} + result["flow_id"], user_input={"next_step_id": "encrypted_key"} ) await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "lock_key" + assert result["step_id"] == "encrypted_key" assert result["errors"] == {} with patch( - "homeassistant.components.switchbot.config_flow.SwitchbotLock.verify_encryption_key", + "switchbot.SwitchbotLock.verify_encryption_key", return_value=False, ): result = await hass.config_entries.flow.async_configure( @@ -127,13 +128,13 @@ async def test_bluetooth_discovery_lock_key(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "lock_key" + assert result["step_id"] == "encrypted_key" assert result["errors"] == {"base": "encryption_key_invalid"} with ( patch_async_setup_entry() as mock_setup_entry, patch( - "homeassistant.components.switchbot.config_flow.SwitchbotLock.verify_encryption_key", + 
"switchbot.SwitchbotLock.verify_encryption_key", return_value=True, ), ): @@ -158,6 +159,51 @@ async def test_bluetooth_discovery_lock_key(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 +async def test_bluetooth_discovery_key(hass: HomeAssistant) -> None: + """Test discovery via bluetooth with a encrypted device.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_BLUETOOTH}, + data=WORELAY_SWITCH_1PM_SERVICE_INFO, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "encrypted_choose_method" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "encrypted_key"} + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "encrypted_key" + assert result["errors"] == {} + + with ( + patch_async_setup_entry() as mock_setup_entry, + patch( + "switchbot.SwitchbotRelaySwitch.verify_encryption_key", return_value=True + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_KEY_ID: "ff", + CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Relay Switch 1PM EEFF" + assert result["data"] == { + CONF_ADDRESS: "AA:BB:CC:DD:EE:FF", + CONF_KEY_ID: "ff", + CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff", + CONF_SENSOR_TYPE: "relay_switch_1pm", + } + + assert len(mock_setup_entry.mock_calls) == 1 + + async def test_bluetooth_discovery_already_setup(hass: HomeAssistant) -> None: """Test discovery via bluetooth with a valid device when already setup.""" entry = MockConfigEntry( @@ -400,7 +446,7 @@ async def test_user_setup_single_bot_with_password(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_user_setup_wolock_key(hass: HomeAssistant) -> None: 
+async def test_user_setup_woencrypted_key(hass: HomeAssistant) -> None: """Test the user initiated form for a lock.""" with patch( @@ -411,18 +457,18 @@ async def test_user_setup_wolock_key(hass: HomeAssistant) -> None: DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "lock_choose_method" + assert result["step_id"] == "encrypted_choose_method" result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={"next_step_id": "lock_key"} + result["flow_id"], user_input={"next_step_id": "encrypted_key"} ) await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "lock_key" + assert result["step_id"] == "encrypted_key" assert result["errors"] == {} with patch( - "homeassistant.components.switchbot.config_flow.SwitchbotLock.verify_encryption_key", + "switchbot.SwitchbotLock.verify_encryption_key", return_value=False, ): result = await hass.config_entries.flow.async_configure( @@ -435,13 +481,13 @@ async def test_user_setup_wolock_key(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "lock_key" + assert result["step_id"] == "encrypted_key" assert result["errors"] == {"base": "encryption_key_invalid"} with ( patch_async_setup_entry() as mock_setup_entry, patch( - "homeassistant.components.switchbot.config_flow.SwitchbotLock.verify_encryption_key", + "switchbot.SwitchbotLock.verify_encryption_key", return_value=True, ), ): @@ -466,7 +512,7 @@ async def test_user_setup_wolock_key(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_user_setup_wolock_auth(hass: HomeAssistant) -> None: +async def test_user_setup_woencrypted_auth(hass: HomeAssistant) -> None: """Test the user initiated form for a lock.""" with patch( @@ -477,18 +523,18 @@ async def test_user_setup_wolock_auth(hass: HomeAssistant) -> None: 
DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "lock_choose_method" + assert result["step_id"] == "encrypted_choose_method" result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={"next_step_id": "lock_auth"} + result["flow_id"], user_input={"next_step_id": "encrypted_auth"} ) await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "lock_auth" + assert result["step_id"] == "encrypted_auth" assert result["errors"] == {} with patch( - "homeassistant.components.switchbot.config_flow.SwitchbotLock.async_retrieve_encryption_key", + "switchbot.SwitchbotLock.async_retrieve_encryption_key", side_effect=SwitchbotAuthenticationError("error from api"), ): result = await hass.config_entries.flow.async_configure( @@ -500,18 +546,18 @@ async def test_user_setup_wolock_auth(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "lock_auth" + assert result["step_id"] == "encrypted_auth" assert result["errors"] == {"base": "auth_failed"} assert "error from api" in result["description_placeholders"]["error_detail"] with ( patch_async_setup_entry() as mock_setup_entry, patch( - "homeassistant.components.switchbot.config_flow.SwitchbotLock.verify_encryption_key", + "switchbot.SwitchbotLock.verify_encryption_key", return_value=True, ), patch( - "homeassistant.components.switchbot.config_flow.SwitchbotLock.async_retrieve_encryption_key", + "switchbot.SwitchbotLock.async_retrieve_encryption_key", return_value={ CONF_KEY_ID: "ff", CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff", @@ -539,7 +585,9 @@ async def test_user_setup_wolock_auth(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_user_setup_wolock_auth_switchbot_api_down(hass: HomeAssistant) -> None: +async def 
test_user_setup_woencrypted_auth_switchbot_api_down( + hass: HomeAssistant, +) -> None: """Test the user initiated form for a lock when the switchbot api is down.""" with patch( @@ -550,18 +598,18 @@ async def test_user_setup_wolock_auth_switchbot_api_down(hass: HomeAssistant) -> DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "lock_choose_method" + assert result["step_id"] == "encrypted_choose_method" result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={"next_step_id": "lock_auth"} + result["flow_id"], user_input={"next_step_id": "encrypted_auth"} ) await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "lock_auth" + assert result["step_id"] == "encrypted_auth" assert result["errors"] == {} with patch( - "homeassistant.components.switchbot.config_flow.SwitchbotLock.async_retrieve_encryption_key", + "switchbot.SwitchbotLock.async_retrieve_encryption_key", side_effect=SwitchbotAccountConnectionError("Switchbot API down"), ): result = await hass.config_entries.flow.async_configure( @@ -600,20 +648,20 @@ async def test_user_setup_wolock_or_bot(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "lock_choose_method" + assert result["step_id"] == "encrypted_choose_method" result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={"next_step_id": "lock_key"} + result["flow_id"], user_input={"next_step_id": "encrypted_key"} ) await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "lock_key" + assert result["step_id"] == "encrypted_key" assert result["errors"] == {} with ( patch_async_setup_entry() as mock_setup_entry, patch( - "homeassistant.components.switchbot.config_flow.SwitchbotLock.verify_encryption_key", + 
"switchbot.SwitchbotLock.verify_encryption_key", return_value=True, ), ): @@ -845,3 +893,163 @@ async def test_options_flow_lock_pro(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 0 assert entry.options[CONF_LOCK_NIGHTLATCH] is True + + +async def test_user_setup_worelay_switch_1pm_key(hass: HomeAssistant) -> None: + """Test the user initiated form for a relay switch 1pm.""" + + with patch( + "homeassistant.components.switchbot.config_flow.async_discovered_service_info", + return_value=[WORELAY_SWITCH_1PM_SERVICE_INFO], + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "encrypted_choose_method" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "encrypted_key"} + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "encrypted_key" + assert result["errors"] == {} + + with ( + patch_async_setup_entry() as mock_setup_entry, + patch( + "switchbot.SwitchbotRelaySwitch.verify_encryption_key", return_value=True + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_KEY_ID: "ff", + CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Relay Switch 1PM EEFF" + assert result["data"] == { + CONF_ADDRESS: "AA:BB:CC:DD:EE:FF", + CONF_KEY_ID: "ff", + CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff", + CONF_SENSOR_TYPE: "relay_switch_1pm", + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_user_setup_worelay_switch_1pm_auth(hass: HomeAssistant) -> None: + """Test the user initiated form for a relay switch 1pm.""" + + with patch( + 
"homeassistant.components.switchbot.config_flow.async_discovered_service_info", + return_value=[WORELAY_SWITCH_1PM_SERVICE_INFO], + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "encrypted_choose_method" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "encrypted_auth"} + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "encrypted_auth" + assert result["errors"] == {} + + with patch( + "switchbot.SwitchbotRelaySwitch.async_retrieve_encryption_key", + side_effect=SwitchbotAuthenticationError("error from api"), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "", + CONF_PASSWORD: "", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "encrypted_auth" + assert result["errors"] == {"base": "auth_failed"} + assert "error from api" in result["description_placeholders"]["error_detail"] + + with ( + patch_async_setup_entry() as mock_setup_entry, + patch( + "switchbot.SwitchbotRelaySwitch.async_retrieve_encryption_key", + return_value={ + CONF_KEY_ID: "ff", + CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff", + }, + ), + patch( + "switchbot.SwitchbotRelaySwitch.verify_encryption_key", return_value=True + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "username", + CONF_PASSWORD: "password", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Relay Switch 1PM EEFF" + assert result["data"] == { + CONF_ADDRESS: "AA:BB:CC:DD:EE:FF", + CONF_KEY_ID: "ff", + CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff", + CONF_SENSOR_TYPE: 
"relay_switch_1pm", + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_user_setup_worelay_switch_1pm_auth_switchbot_api_down( + hass: HomeAssistant, +) -> None: + """Test the user initiated form for a relay switch 1pm when the switchbot api is down.""" + + with patch( + "homeassistant.components.switchbot.config_flow.async_discovered_service_info", + return_value=[WORELAY_SWITCH_1PM_SERVICE_INFO], + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "encrypted_choose_method" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={"next_step_id": "encrypted_auth"} + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "encrypted_auth" + assert result["errors"] == {} + + with patch( + "switchbot.SwitchbotRelaySwitch.async_retrieve_encryption_key", + side_effect=SwitchbotAccountConnectionError("Switchbot API down"), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "", + CONF_PASSWORD: "", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "api_error" + assert result["description_placeholders"] == {"error_detail": "Switchbot API down"} diff --git a/tests/components/switchbot/test_sensor.py b/tests/components/switchbot/test_sensor.py index 3adeaef936c..205bb739508 100644 --- a/tests/components/switchbot/test_sensor.py +++ b/tests/components/switchbot/test_sensor.py @@ -1,9 +1,15 @@ """Test the switchbot sensors.""" +from unittest.mock import patch + import pytest from homeassistant.components.sensor import ATTR_STATE_CLASS -from homeassistant.components.switchbot.const import DOMAIN +from homeassistant.components.switchbot.const import ( + CONF_ENCRYPTION_KEY, + CONF_KEY_ID, + DOMAIN, +) from 
homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_UNIT_OF_MEASUREMENT, @@ -15,7 +21,11 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from . import WOHAND_SERVICE_INFO, WOMETERTHPC_SERVICE_INFO +from . import ( + WOHAND_SERVICE_INFO, + WOMETERTHPC_SERVICE_INFO, + WORELAY_SWITCH_1PM_SERVICE_INFO, +) from tests.common import MockConfigEntry from tests.components.bluetooth import inject_bluetooth_service_info @@ -105,3 +115,39 @@ async def test_co2_sensor(hass: HomeAssistant) -> None: assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_relay_switch_1pm_power_sensor(hass: HomeAssistant) -> None: + """Test setting up creates the power sensor.""" + await async_setup_component(hass, DOMAIN, {}) + inject_bluetooth_service_info(hass, WORELAY_SWITCH_1PM_SERVICE_INFO) + + with patch( + "switchbot.SwitchbotRelaySwitch.update", + return_value=None, + ): + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + CONF_NAME: "test-name", + CONF_SENSOR_TYPE: "relay_switch_1pm", + CONF_KEY_ID: "ff", + CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff", + }, + unique_id="aabbccddeeaa", + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + power_sensor = hass.states.get("sensor.test_name_power") + power_sensor_attrs = power_sensor.attributes + assert power_sensor.state == "4.9" + assert power_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Power" + assert power_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "W" + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() From e38a85da6451203ba678e31ece6f305755257366 Mon Sep 17 00:00:00 2001 From: Kevin Worrel <37058192+dieselrabbit@users.noreply.github.com> Date: 
Sat, 21 Dec 2024 00:25:21 -0800 Subject: [PATCH 548/677] Add entity translation strings for ScreenLogic (#130708) * Add translation strings for entities * Translation key updates * Mach original name * Remove state translations * Sentence case entity names * Fix tests * Add missing translation_key for Air temperature * Revert inadvertant entity_id change on last_dose_time sensors * Update homeassistant/components/screenlogic/strings.json Lowercase 'entry' Co-authored-by: Joost Lekkerkerker * Define translations for each circuit delay sensor --------- Co-authored-by: Joost Lekkerkerker --- .../components/screenlogic/binary_sensor.py | 27 ++++ .../components/screenlogic/climate.py | 2 +- .../components/screenlogic/entity.py | 3 +- .../components/screenlogic/number.py | 6 + .../components/screenlogic/sensor.py | 33 ++++ .../components/screenlogic/strings.json | 143 +++++++++++++++++- tests/components/screenlogic/test_init.py | 2 +- 7 files changed, 207 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/screenlogic/binary_sensor.py b/homeassistant/components/screenlogic/binary_sensor.py index fda1c348edf..4a178c60d81 100644 --- a/homeassistant/components/screenlogic/binary_sensor.py +++ b/homeassistant/components/screenlogic/binary_sensor.py @@ -49,26 +49,31 @@ SUPPORTED_CORE_SENSORS = [ data_root=(DEVICE.CONTROLLER, GROUP.SENSOR), key=VALUE.ACTIVE_ALERT, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="active_alert", ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.STATUS_CHANGED, data_root=(DEVICE.CONTROLLER, GROUP.SENSOR), key=VALUE.CLEANER_DELAY, + translation_key="cleaner_delay", ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.STATUS_CHANGED, data_root=(DEVICE.CONTROLLER, GROUP.SENSOR), key=VALUE.FREEZE_MODE, + translation_key="freeze_mode", ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.STATUS_CHANGED, data_root=(DEVICE.CONTROLLER, GROUP.SENSOR), key=VALUE.POOL_DELAY, + 
translation_key="pool_delay", ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.STATUS_CHANGED, data_root=(DEVICE.CONTROLLER, GROUP.SENSOR), key=VALUE.SPA_DELAY, + translation_key="spa_delay", ), ] @@ -85,75 +90,96 @@ SUPPORTED_INTELLICHEM_SENSORS = [ data_root=(DEVICE.INTELLICHEM, GROUP.ALARM), key=VALUE.FLOW_ALARM, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="flow_alarm", ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALARM), key=VALUE.ORP_HIGH_ALARM, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="chem_high_alarm", + translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALARM), key=VALUE.ORP_LOW_ALARM, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="chem_low_alarm", + translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALARM), key=VALUE.ORP_SUPPLY_ALARM, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="chem_supply_alarm", + translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALARM), key=VALUE.PH_HIGH_ALARM, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="chem_high_alarm", + translation_placeholders={"chem": "pH"}, ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALARM), key=VALUE.PH_LOW_ALARM, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="chem_low_alarm", + translation_placeholders={"chem": "pH"}, ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALARM), key=VALUE.PH_SUPPLY_ALARM, 
device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="chem_supply_alarm", + translation_placeholders={"chem": "pH"}, ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALARM), key=VALUE.PROBE_FAULT_ALARM, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="probe_fault_alarm", ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALERT), key=VALUE.ORP_LIMIT, + translation_key="chem_limit", + translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALERT), key=VALUE.PH_LIMIT, + translation_key="chem_limit", + translation_placeholders={"chem": "pH"}, ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.ALERT), key=VALUE.PH_LOCKOUT, + translation_key="ph_lockout", ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.WATER_BALANCE), key=VALUE.CORROSIVE, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="corosive", ), ScreenLogicPushBinarySensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.WATER_BALANCE), key=VALUE.SCALING, device_class=BinarySensorDeviceClass.PROBLEM, + translation_key="scaling", ), ] @@ -161,6 +187,7 @@ SUPPORTED_SCG_SENSORS = [ ScreenLogicBinarySensorDescription( data_root=(DEVICE.SCG, GROUP.SENSOR), key=VALUE.STATE, + translation_key="scg_state", ) ] diff --git a/homeassistant/components/screenlogic/climate.py b/homeassistant/components/screenlogic/climate.py index 08300900f5d..c0cff8d511b 100644 --- a/homeassistant/components/screenlogic/climate.py +++ b/homeassistant/components/screenlogic/climate.py @@ -56,6 +56,7 @@ async def async_setup_entry( subscription_code=CODE.STATUS_CHANGED, 
data_root=(DEVICE.BODY,), key=body_index, + translation_key=f"body_{body_index}", ), ) for body_index in gateway.get_data(DEVICE.BODY) @@ -97,7 +98,6 @@ class ScreenLogicClimate(ScreenLogicPushEntity, ClimateEntity, RestoreEntity): self._attr_min_temp = self.entity_data[ATTR.MIN_SETPOINT] self._attr_max_temp = self.entity_data[ATTR.MAX_SETPOINT] - self._attr_name = self.entity_data[VALUE.HEAT_STATE][ATTR.NAME] self._last_preset = None @property diff --git a/homeassistant/components/screenlogic/entity.py b/homeassistant/components/screenlogic/entity.py index 0f7530b7289..746abc2fde6 100644 --- a/homeassistant/components/screenlogic/entity.py +++ b/homeassistant/components/screenlogic/entity.py @@ -55,7 +55,8 @@ class ScreenLogicEntity(CoordinatorEntity[ScreenlogicDataUpdateCoordinator]): self._data_path = (*self.entity_description.data_root, self._data_key) mac = self.mac self._attr_unique_id = f"{mac}_{generate_unique_id(*self._data_path)}" - self._attr_name = self.entity_data[ATTR.NAME] + if not entity_description.translation_key: + self._attr_name = self.entity_data[ATTR.NAME] assert mac is not None self._attr_device_info = DeviceInfo( connections={(dr.CONNECTION_NETWORK_MAC, mac)}, diff --git a/homeassistant/components/screenlogic/number.py b/homeassistant/components/screenlogic/number.py index d0eb6a71ec8..3634147e509 100644 --- a/homeassistant/components/screenlogic/number.py +++ b/homeassistant/components/screenlogic/number.py @@ -57,6 +57,7 @@ SUPPORTED_INTELLICHEM_NUMBERS = [ key=VALUE.CALCIUM_HARDNESS, entity_category=EntityCategory.CONFIG, mode=NumberMode.BOX, + translation_key="calcium_hardness", ), ScreenLogicPushNumberDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -64,6 +65,7 @@ SUPPORTED_INTELLICHEM_NUMBERS = [ key=VALUE.CYA, entity_category=EntityCategory.CONFIG, mode=NumberMode.BOX, + translation_key="cya", ), ScreenLogicPushNumberDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -71,6 +73,7 @@ SUPPORTED_INTELLICHEM_NUMBERS = [ 
key=VALUE.TOTAL_ALKALINITY, entity_category=EntityCategory.CONFIG, mode=NumberMode.BOX, + translation_key="total_alkalinity", ), ScreenLogicPushNumberDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -78,6 +81,7 @@ SUPPORTED_INTELLICHEM_NUMBERS = [ key=VALUE.SALT_TDS_PPM, entity_category=EntityCategory.CONFIG, mode=NumberMode.BOX, + translation_key="salt_tds_ppm", ), ] @@ -86,11 +90,13 @@ SUPPORTED_SCG_NUMBERS = [ data_root=(DEVICE.SCG, GROUP.CONFIGURATION), key=VALUE.POOL_SETPOINT, entity_category=EntityCategory.CONFIG, + translation_key="pool_setpoint", ), ScreenLogicNumberDescription( data_root=(DEVICE.SCG, GROUP.CONFIGURATION), key=VALUE.SPA_SETPOINT, entity_category=EntityCategory.CONFIG, + translation_key="spa_setpoint", ), ] diff --git a/homeassistant/components/screenlogic/sensor.py b/homeassistant/components/screenlogic/sensor.py index c580204221f..6ae6e802859 100644 --- a/homeassistant/components/screenlogic/sensor.py +++ b/homeassistant/components/screenlogic/sensor.py @@ -58,6 +58,7 @@ SUPPORTED_CORE_SENSORS = [ key=VALUE.AIR_TEMPERATURE, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, + translation_key="air_temperature", ), ] @@ -97,12 +98,16 @@ SUPPORTED_INTELLICHEM_SENSORS = [ data_root=(DEVICE.INTELLICHEM, GROUP.SENSOR), key=VALUE.ORP_NOW, state_class=SensorStateClass.MEASUREMENT, + translation_key="chem_now", + translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.SENSOR), key=VALUE.PH_NOW, state_class=SensorStateClass.MEASUREMENT, + translation_key="chem_now", + translation_placeholders={"chem": "pH"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -110,6 +115,8 @@ SUPPORTED_INTELLICHEM_SENSORS = [ key=VALUE.ORP_SUPPLY_LEVEL, state_class=SensorStateClass.MEASUREMENT, value_mod=lambda val: int(val) - 1, + translation_key="chem_supply_level", + 
translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -117,6 +124,8 @@ SUPPORTED_INTELLICHEM_SENSORS = [ key=VALUE.PH_SUPPLY_LEVEL, state_class=SensorStateClass.MEASUREMENT, value_mod=lambda val: int(val) - 1, + translation_key="chem_supply_level", + translation_placeholders={"chem": "pH"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -124,46 +133,56 @@ SUPPORTED_INTELLICHEM_SENSORS = [ key=VALUE.PH_PROBE_WATER_TEMP, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, + translation_key="ph_probe_water_temp", ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.SENSOR), key=VALUE.SATURATION, state_class=SensorStateClass.MEASUREMENT, + translation_key="saturation", ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.CONFIGURATION), key=VALUE.CALCIUM_HARDNESS, entity_registry_enabled_default=False, # Superseded by number entity + translation_key="calcium_hardness", ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.CONFIGURATION), key=VALUE.CYA, entity_registry_enabled_default=False, # Superseded by number entity + translation_key="cya", ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.CONFIGURATION), key=VALUE.ORP_SETPOINT, + translation_key="chem_setpoint", + translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.CONFIGURATION), key=VALUE.PH_SETPOINT, + translation_key="chem_setpoint", + translation_placeholders={"chem": "pH"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.CONFIGURATION), 
key=VALUE.TOTAL_ALKALINITY, entity_registry_enabled_default=False, # Superseded by number entity + translation_key="total_alkalinity", ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, data_root=(DEVICE.INTELLICHEM, GROUP.CONFIGURATION), key=VALUE.SALT_TDS_PPM, entity_registry_enabled_default=False, # Superseded by number entity + translation_key="salt_tds_ppm", ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -172,6 +191,8 @@ SUPPORTED_INTELLICHEM_SENSORS = [ device_class=SensorDeviceClass.ENUM, options=["Dosing", "Mixing", "Monitoring"], value_mod=lambda val: DOSE_STATE(val).title, + translation_key="chem_dose_state", + translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -179,6 +200,8 @@ SUPPORTED_INTELLICHEM_SENSORS = [ key=VALUE.ORP_LAST_DOSE_TIME, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="chem_last_dose_time", + translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -186,6 +209,8 @@ SUPPORTED_INTELLICHEM_SENSORS = [ key=VALUE.ORP_LAST_DOSE_VOLUME, device_class=SensorDeviceClass.VOLUME, state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="chem_last_dose_volume", + translation_placeholders={"chem": "ORP"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -194,6 +219,8 @@ SUPPORTED_INTELLICHEM_SENSORS = [ device_class=SensorDeviceClass.ENUM, options=["Dosing", "Mixing", "Monitoring"], value_mod=lambda val: DOSE_STATE(val).title, + translation_key="chem_dose_state", + translation_placeholders={"chem": "pH"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -201,6 +228,8 @@ SUPPORTED_INTELLICHEM_SENSORS = [ key=VALUE.PH_LAST_DOSE_TIME, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.TOTAL_INCREASING, + 
translation_key="chem_last_dose_time", + translation_placeholders={"chem": "pH"}, ), ScreenLogicPushSensorDescription( subscription_code=CODE.CHEMISTRY_CHANGED, @@ -208,6 +237,8 @@ SUPPORTED_INTELLICHEM_SENSORS = [ key=VALUE.PH_LAST_DOSE_VOLUME, device_class=SensorDeviceClass.VOLUME, state_class=SensorStateClass.TOTAL_INCREASING, + translation_key="chem_last_dose_volume", + translation_placeholders={"chem": "pH"}, ), ] @@ -216,10 +247,12 @@ SUPPORTED_SCG_SENSORS = [ data_root=(DEVICE.SCG, GROUP.SENSOR), key=VALUE.SALT_PPM, state_class=SensorStateClass.MEASUREMENT, + translation_key="salt_ppm", ), ScreenLogicSensorDescription( data_root=(DEVICE.SCG, GROUP.CONFIGURATION), key=VALUE.SUPER_CHLOR_TIMER, + translation_key="super_chlor_timer", ), ] diff --git a/homeassistant/components/screenlogic/strings.json b/homeassistant/components/screenlogic/strings.json index 91395a0e86d..da5e3156592 100644 --- a/homeassistant/components/screenlogic/strings.json +++ b/homeassistant/components/screenlogic/strings.json @@ -1,4 +1,11 @@ { + "common": { + "service_config_entry_name": "Config entry", + "service_config_entry_description": "The config entry to use for this action.", + "climate_preset_solar": "Solar", + "climate_preset_solar_prefered": "Solar Prefered", + "climate_preset_heater": "Heater" + }, "config": { "flow_title": "{name}", "error": { @@ -42,8 +49,8 @@ "description": "Sets the color mode for all color-capable lights attached to this ScreenLogic gateway.", "fields": { "config_entry": { - "name": "Config Entry", - "description": "The config entry to use for this action." 
+ "name": "[%key:component::screenlogic::common::service_config_entry_name%]", + "description": "[%key:component::screenlogic::common::service_config_entry_description%]" }, "color_mode": { "name": "Color Mode", @@ -56,8 +63,8 @@ "description": "Begins super chlorination, running for the specified period or 24 hours if none is specified.", "fields": { "config_entry": { - "name": "Config Entry", - "description": "The config entry to use for this action." + "name": "[%key:component::screenlogic::common::service_config_entry_name%]", + "description": "[%key:component::screenlogic::common::service_config_entry_description%]" }, "runtime": { "name": "Run Time", @@ -70,10 +77,134 @@ "description": "Stops super chlorination.", "fields": { "config_entry": { - "name": "Config Entry", - "description": "The config entry to use for this action." + "name": "[%key:component::screenlogic::common::service_config_entry_name%]", + "description": "[%key:component::screenlogic::common::service_config_entry_description%]" } } } + }, + "entity": { + "binary_sensor": { + "active_alert": { + "name": "Active alert" + }, + "pool_delay": { + "name": "Pool delay" + }, + "spa_delay": { + "name": "Spa delay" + }, + "cleaner_delay": { + "name": "Cleaner delay" + }, + "freeze_mode": { + "name": "Freeze mode" + }, + "flow_alarm": { + "name": "Flow alarm" + }, + "chem_high_alarm": { + "name": "{chem} high alarm" + }, + "chem_low_alarm": { + "name": "{chem} low alarm" + }, + "chem_supply_alarm": { + "name": "{chem} supply alarm" + }, + "probe_fault_alarm": { + "name": "Probe fault" + }, + "chem_limit": { + "name": "{chem} dose limit reached" + }, + "ph_lockout": { + "name": "pH lockout" + }, + "corosive": { + "name": "SI corrosive" + }, + "scaling": { + "name": "SI scaling" + }, + "scg_state": { + "name": "Chlorinator" + } + }, + "climate": { + "body_0": { + "name": "Pool heat" + }, + "body_1": { + "name": "Spa heat" + } + }, + "number": { + "calcium_hardness": { + "name": "Calcium hardness" + }, + 
"cya": { + "name": "Cyanuric acid" + }, + "total_alkalinity": { + "name": "Total alkalinity" + }, + "salt_tds_ppm": { + "name": "Salt/TDS" + }, + "pool_setpoint": { + "name": "Pool chlorinator setpoint" + }, + "spa_setpoint": { + "name": "Spa chlorinator setpoint" + } + }, + "sensor": { + "air_temperature": { + "name": "Air temperature" + }, + "chem_now": { + "name": "{chem} now" + }, + "chem_supply_level": { + "name": "{chem} supply level" + }, + "ph_probe_water_temp": { + "name": "pH probe water temperature" + }, + "saturation": { + "name": "Saturation index" + }, + "chem_setpoint": { + "name": "{chem} setpoint" + }, + "calcium_hardness": { + "name": "[%key:component::screenlogic::entity::number::calcium_hardness::name%]" + }, + "cya": { + "name": "[%key:component::screenlogic::entity::number::cya::name%]" + }, + "total_alkalinity": { + "name": "[%key:component::screenlogic::entity::number::total_alkalinity::name%]" + }, + "salt_tds_ppm": { + "name": "[%key:component::screenlogic::entity::number::salt_tds_ppm::name%]" + }, + "chem_dose_state": { + "name": "{chem} dosing state" + }, + "chem_last_dose_time": { + "name": "{chem} last dose time" + }, + "chem_last_dose_volume": { + "name": "{chem} last dose volume" + }, + "salt_ppm": { + "name": "Chlorinator salt" + }, + "super_chlor_timer": { + "name": "Super chlorination timer" + } + } } } diff --git a/tests/components/screenlogic/test_init.py b/tests/components/screenlogic/test_init.py index f21a1118b4f..b177f860606 100644 --- a/tests/components/screenlogic/test_init.py +++ b/tests/components/screenlogic/test_init.py @@ -43,7 +43,7 @@ TEST_MIGRATING_ENTITIES = [ EntityMigrationData( "Chemistry Alarm", "chem_alarm", - "Active Alert", + "Active alert", "active_alert", BINARY_SENSOR_DOMAIN, ), From 3788e942a7cedca27d4a67830c8aa0b9cc48e043 Mon Sep 17 00:00:00 2001 From: Dan Raper Date: Sat, 21 Dec 2024 08:25:34 +0000 Subject: [PATCH 549/677] Bump Ohme library version to 1.2.0 (#133666) Bump library version --- 
homeassistant/components/ohme/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ohme/manifest.json b/homeassistant/components/ohme/manifest.json index c9e1ccf9ac2..4ab0697bbb7 100644 --- a/homeassistant/components/ohme/manifest.json +++ b/homeassistant/components/ohme/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "cloud_polling", "quality_scale": "silver", - "requirements": ["ohme==1.1.1"] + "requirements": ["ohme==1.2.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 2cae5d93bf8..4a05da9d61a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1522,7 +1522,7 @@ odp-amsterdam==6.0.2 oemthermostat==1.1.1 # homeassistant.components.ohme -ohme==1.1.1 +ohme==1.2.0 # homeassistant.components.ollama ollama==0.3.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a286cc9a0da..032165b6182 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1270,7 +1270,7 @@ objgraph==3.5.0 odp-amsterdam==6.0.2 # homeassistant.components.ohme -ohme==1.1.1 +ohme==1.2.0 # homeassistant.components.ollama ollama==0.3.3 From 9c70ec4150a882f6e23ad53ba8e0664dc48d2f44 Mon Sep 17 00:00:00 2001 From: Florent Thoumie Date: Sat, 21 Dec 2024 00:26:38 -0800 Subject: [PATCH 550/677] iaqualink: fix load_verify_locations() blocking call (#133459) * Try to fix blocking call * Fix lint --- homeassistant/components/iaqualink/config_flow.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/iaqualink/config_flow.py b/homeassistant/components/iaqualink/config_flow.py index 2cb1ba4b5d7..a307c1af98d 100644 --- a/homeassistant/components/iaqualink/config_flow.py +++ b/homeassistant/components/iaqualink/config_flow.py @@ -14,6 +14,7 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const 
import CONF_PASSWORD, CONF_USERNAME +from homeassistant.helpers.httpx_client import get_async_client from .const import DOMAIN @@ -34,7 +35,9 @@ class AqualinkFlowHandler(ConfigFlow, domain=DOMAIN): password = user_input[CONF_PASSWORD] try: - async with AqualinkClient(username, password): + async with AqualinkClient( + username, password, httpx_client=get_async_client(self.hass) + ): pass except AqualinkServiceUnauthorizedException: errors["base"] = "invalid_auth" From 954b6133cbe9fdd618c903cc11b95bbd283446f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Sat, 21 Dec 2024 08:35:47 +0000 Subject: [PATCH 551/677] Use common mock fixture in Idasen Desk config flow tests (#133679) --- .../components/idasen_desk/quality_scale.yaml | 5 +- tests/components/idasen_desk/conftest.py | 22 ++++-- .../idasen_desk/test_config_flow.py | 77 ++++++++----------- 3 files changed, 48 insertions(+), 56 deletions(-) diff --git a/homeassistant/components/idasen_desk/quality_scale.yaml b/homeassistant/components/idasen_desk/quality_scale.yaml index f91fd16176d..9aca846e32c 100644 --- a/homeassistant/components/idasen_desk/quality_scale.yaml +++ b/homeassistant/components/idasen_desk/quality_scale.yaml @@ -10,10 +10,7 @@ rules: This integration does not use polling. 
brands: done common-modules: done - config-flow-test-coverage: - status: todo - comment: | - - use mock_desk_api + config-flow-test-coverage: done config-flow: done dependency-transparency: done docs-actions: diff --git a/tests/components/idasen_desk/conftest.py b/tests/components/idasen_desk/conftest.py index 24ef8311445..20cc95e8ae4 100644 --- a/tests/components/idasen_desk/conftest.py +++ b/tests/components/idasen_desk/conftest.py @@ -19,9 +19,14 @@ def mock_bluetooth(enable_bluetooth: None) -> Generator[None]: @pytest.fixture(autouse=False) def mock_desk_api(): """Set up idasen desk API fixture.""" - with mock.patch( - "homeassistant.components.idasen_desk.coordinator.Desk" - ) as desk_patched: + with ( + mock.patch( + "homeassistant.components.idasen_desk.coordinator.Desk" + ) as desk_patched, + mock.patch( + "homeassistant.components.idasen_desk.config_flow.Desk", new=desk_patched + ), + ): mock_desk = MagicMock() def mock_init( @@ -33,17 +38,20 @@ def mock_desk_api(): desk_patched.side_effect = mock_init - async def mock_connect(ble_device): + async def mock_connect(ble_device, retry: bool = True): mock_desk.is_connected = True - mock_desk.trigger_update_callback(None) + if mock_desk.trigger_update_callback: + mock_desk.trigger_update_callback(None) async def mock_disconnect(): mock_desk.is_connected = False - mock_desk.trigger_update_callback(None) + if mock_desk.trigger_update_callback: + mock_desk.trigger_update_callback(None) async def mock_move_to(height: float): mock_desk.height_percent = height - mock_desk.trigger_update_callback(height) + if mock_desk.trigger_update_callback: + mock_desk.trigger_update_callback(height) async def mock_move_up(): await mock_move_to(100) diff --git a/tests/components/idasen_desk/test_config_flow.py b/tests/components/idasen_desk/test_config_flow.py index be729545b88..baeed6be1ab 100644 --- a/tests/components/idasen_desk/test_config_flow.py +++ b/tests/components/idasen_desk/test_config_flow.py @@ -1,6 +1,6 @@ """Test 
the IKEA Idasen Desk config flow.""" -from unittest.mock import ANY, patch +from unittest.mock import ANY, MagicMock, patch from bleak.exc import BleakError from idasen_ha.errors import AuthFailedError @@ -17,7 +17,7 @@ from . import IDASEN_DISCOVERY_INFO, NOT_IDASEN_DISCOVERY_INFO from tests.common import MockConfigEntry -async def test_user_step_success(hass: HomeAssistant) -> None: +async def test_user_step_success(hass: HomeAssistant, mock_desk_api: MagicMock) -> None: """Test user step success path.""" with patch( "homeassistant.components.idasen_desk.config_flow.async_discovered_service_info", @@ -30,14 +30,9 @@ async def test_user_step_success(hass: HomeAssistant) -> None: assert result["step_id"] == "user" assert result["errors"] == {} - with ( - patch("homeassistant.components.idasen_desk.config_flow.Desk.connect"), - patch("homeassistant.components.idasen_desk.config_flow.Desk.disconnect"), - patch( - "homeassistant.components.idasen_desk.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): + with patch( + "homeassistant.components.idasen_desk.async_setup_entry", return_value=True + ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -99,7 +94,10 @@ async def test_user_step_no_new_devices_found(hass: HomeAssistant) -> None: ], ) async def test_user_step_cannot_connect( - hass: HomeAssistant, exception: Exception, expected_error: str + hass: HomeAssistant, + mock_desk_api: MagicMock, + exception: Exception, + expected_error: str, ) -> None: """Test user step with a cannot connect error.""" with patch( @@ -113,33 +111,26 @@ async def test_user_step_cannot_connect( assert result["step_id"] == "user" assert result["errors"] == {} - with ( - patch( - "homeassistant.components.idasen_desk.config_flow.Desk.connect", - side_effect=exception, - ), - patch("homeassistant.components.idasen_desk.config_flow.Desk.disconnect"), - ): - result2 = await hass.config_entries.flow.async_configure( - 
result["flow_id"], - { - CONF_ADDRESS: IDASEN_DISCOVERY_INFO.address, - }, - ) - await hass.async_block_till_done() + default_connect_side_effect = mock_desk_api.connect.side_effect + mock_desk_api.connect.side_effect = exception + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_ADDRESS: IDASEN_DISCOVERY_INFO.address, + }, + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "user" assert result2["errors"] == {"base": expected_error} - with ( - patch("homeassistant.components.idasen_desk.config_flow.Desk.connect"), - patch("homeassistant.components.idasen_desk.config_flow.Desk.disconnect"), - patch( - "homeassistant.components.idasen_desk.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): + mock_desk_api.connect.side_effect = default_connect_side_effect + with patch( + "homeassistant.components.idasen_desk.async_setup_entry", + return_value=True, + ) as mock_setup_entry: result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], { @@ -157,7 +148,9 @@ async def test_user_step_cannot_connect( assert len(mock_setup_entry.mock_calls) == 1 -async def test_bluetooth_step_success(hass: HomeAssistant) -> None: +async def test_bluetooth_step_success( + hass: HomeAssistant, mock_desk_api: MagicMock +) -> None: """Test bluetooth step success path.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -168,16 +161,10 @@ async def test_bluetooth_step_success(hass: HomeAssistant) -> None: assert result["step_id"] == "user" assert result["errors"] == {} - with ( - patch( - "homeassistant.components.idasen_desk.config_flow.Desk.connect" - ) as desk_connect, - patch("homeassistant.components.idasen_desk.config_flow.Desk.disconnect"), - patch( - "homeassistant.components.idasen_desk.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): + with patch( + "homeassistant.components.idasen_desk.async_setup_entry", + 
return_value=True, + ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -193,4 +180,4 @@ async def test_bluetooth_step_success(hass: HomeAssistant) -> None: } assert result2["result"].unique_id == IDASEN_DISCOVERY_INFO.address assert len(mock_setup_entry.mock_calls) == 1 - desk_connect.assert_called_with(ANY, retry=False) + mock_desk_api.connect.assert_called_with(ANY, retry=False) From 82f54eb9d221ddc46a731b912e6203ca8f6cd173 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Sat, 21 Dec 2024 09:38:59 +0100 Subject: [PATCH 552/677] Adjust the default backup name (#133668) --- homeassistant/components/backup/manager.py | 5 ++++- tests/components/backup/snapshots/test_websocket.ambr | 6 +++--- tests/components/backup/test_manager.py | 6 +++--- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index a27c1cc7170..9b20c82d709 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -726,7 +726,10 @@ class BackupManager: "Cannot include all addons and specify specific addons" ) - backup_name = name or f"Core {HAVERSION}" + backup_name = ( + name + or f"{"Automatic" if with_automatic_settings else "Custom"} {HAVERSION}" + ) new_backup, self._backup_task = await self._reader_writer.async_create_backup( agent_ids=agent_ids, backup_name=backup_name, diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index 4de06861b67..16640a95ddb 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -2574,7 +2574,7 @@ dict({ 'id': 2, 'result': dict({ - 'backup_job_id': '27f5c632', + 'backup_job_id': 'fceef4e6', }), 'success': True, 'type': 'result', @@ -2645,7 +2645,7 @@ dict({ 'id': 2, 'result': dict({ - 'backup_job_id': '27f5c632', + 
'backup_job_id': 'fceef4e6', }), 'success': True, 'type': 'result', @@ -2716,7 +2716,7 @@ dict({ 'id': 2, 'result': dict({ - 'backup_job_id': '27f5c632', + 'backup_job_id': 'fceef4e6', }), 'success': True, 'type': 'result', diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index 1c45c86149b..9b652edb087 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -121,7 +121,7 @@ async def test_async_create_backup( assert create_backup.called assert create_backup.call_args == call( agent_ids=["backup.local"], - backup_name="Core 2025.1.0", + backup_name="Custom 2025.1.0", extra_metadata={ "instance_id": hass.data["core.uuid"], "with_automatic_settings": False, @@ -254,7 +254,7 @@ async def test_async_initiate_backup( ws_client = await hass_ws_client(hass) include_database = params.get("include_database", True) - name = params.get("name", "Core 2025.1.0") + name = params.get("name", "Custom 2025.1.0") password = params.get("password") path_glob.return_value = [] @@ -502,7 +502,7 @@ async def test_async_initiate_backup_with_agent_error( "folders": [], "homeassistant_included": True, "homeassistant_version": "2025.1.0", - "name": "Core 2025.1.0", + "name": "Custom 2025.1.0", "protected": False, "size": 123, "with_automatic_settings": False, From e43f4466e0f7e04018e5a68aa0f23667a7d76f0d Mon Sep 17 00:00:00 2001 From: Andrew Sayre <6730289+andrewsayre@users.noreply.github.com> Date: Sat, 21 Dec 2024 02:40:33 -0600 Subject: [PATCH 553/677] Improve HEOS group handling (#132213) * Move register method to GroupManager * Remove GroupManager mapping when entity removed * Add test for when unloaded * Error when group member not found * Use entity registery to remove entity * Update tests per feedback --- homeassistant/components/heos/__init__.py | 43 ++++++++++--------- homeassistant/components/heos/media_player.py | 14 ++++-- tests/components/heos/test_media_player.py | 32 ++++++++++++++ 3 
files changed, 66 insertions(+), 23 deletions(-) diff --git a/homeassistant/components/heos/__init__.py b/homeassistant/components/heos/__init__.py index e6a46f5a4ca..b9b9b30a280 100644 --- a/homeassistant/components/heos/__init__.py +++ b/homeassistant/components/heos/__init__.py @@ -11,7 +11,7 @@ from pyheos import Heos, HeosError, HeosPlayer, const as heos_const from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP, Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.dispatcher import ( @@ -259,21 +259,19 @@ class GroupManager: return group_info_by_entity_id async def async_join_players( - self, leader_entity_id: str, member_entity_ids: list[str] + self, leader_id: int, leader_entity_id: str, member_entity_ids: list[str] ) -> None: """Create a group a group leader and member players.""" + # Resolve HEOS player_id for each member entity_id entity_id_to_player_id_map = self._get_entity_id_to_player_id_map() - leader_id = entity_id_to_player_id_map.get(leader_entity_id) - if not leader_id: - raise HomeAssistantError( - f"The group leader {leader_entity_id} could not be resolved to a HEOS" - " player." - ) - member_ids = [ - entity_id_to_player_id_map[member] - for member in member_entity_ids - if member in entity_id_to_player_id_map - ] + member_ids: list[int] = [] + for member in member_entity_ids: + member_id = entity_id_to_player_id_map.get(member) + if not member_id: + raise HomeAssistantError( + f"The group member {member} could not be resolved to a HEOS player." 
+ ) + member_ids.append(member_id) try: await self.controller.create_group(leader_id, member_ids) @@ -285,14 +283,8 @@ class GroupManager: err, ) - async def async_unjoin_player(self, player_entity_id: str): + async def async_unjoin_player(self, player_id: int, player_entity_id: str): """Remove `player_entity_id` from any group.""" - player_id = self._get_entity_id_to_player_id_map().get(player_entity_id) - if not player_id: - raise HomeAssistantError( - f"The player {player_entity_id} could not be resolved to a HEOS player." - ) - try: await self.controller.create_group(player_id, []) except HeosError as err: @@ -345,6 +337,17 @@ class GroupManager: self._disconnect_player_added() self._disconnect_player_added = None + @callback + def register_media_player(self, player_id: int, entity_id: str) -> CALLBACK_TYPE: + """Register a media player player_id with it's entity_id so it can be resolved later.""" + self.entity_id_map[player_id] = entity_id + return lambda: self.unregister_media_player(player_id) + + @callback + def unregister_media_player(self, player_id) -> None: + """Remove a media player player_id from the entity_id map.""" + self.entity_id_map.pop(player_id, None) + @property def group_membership(self): """Provide access to group members for player entities.""" diff --git a/homeassistant/components/heos/media_player.py b/homeassistant/components/heos/media_player.py index 5255d369c2f..be816849e32 100644 --- a/homeassistant/components/heos/media_player.py +++ b/homeassistant/components/heos/media_player.py @@ -160,7 +160,11 @@ class HeosMediaPlayer(MediaPlayerEntity): async_dispatcher_connect(self.hass, SIGNAL_HEOS_UPDATED, self._heos_updated) ) # Register this player's entity_id so it can be resolved by the group manager - self._group_manager.entity_id_map[self._player.player_id] = self.entity_id + self.async_on_remove( + self._group_manager.register_media_player( + self._player.player_id, self.entity_id + ) + ) async_dispatcher_send(self.hass, 
SIGNAL_HEOS_PLAYER_ADDED) @log_command_error("clear playlist") @@ -171,7 +175,9 @@ class HeosMediaPlayer(MediaPlayerEntity): @log_command_error("join_players") async def async_join_players(self, group_members: list[str]) -> None: """Join `group_members` as a player group with the current player.""" - await self._group_manager.async_join_players(self.entity_id, group_members) + await self._group_manager.async_join_players( + self._player.player_id, self.entity_id, group_members + ) @log_command_error("pause") async def async_media_pause(self) -> None: @@ -294,7 +300,9 @@ class HeosMediaPlayer(MediaPlayerEntity): @log_command_error("unjoin_player") async def async_unjoin_player(self) -> None: """Remove this player from any group.""" - await self._group_manager.async_unjoin_player(self.entity_id) + await self._group_manager.async_unjoin_player( + self._player.player_id, self.entity_id + ) async def async_will_remove_from_hass(self) -> None: """Disconnect the device when removed.""" diff --git a/tests/components/heos/test_media_player.py b/tests/components/heos/test_media_player.py index fa3f01107c1..355cb47a0d9 100644 --- a/tests/components/heos/test_media_player.py +++ b/tests/components/heos/test_media_player.py @@ -51,6 +51,7 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.setup import async_setup_component @@ -1051,3 +1052,34 @@ async def test_media_player_unjoin_group( blocking=True, ) assert "Failed to ungroup media_player.test_player" in caplog.text + + +async def test_media_player_group_fails_when_entity_removed( + hass: HomeAssistant, + config_entry, + config, + controller, + entity_registry: er.EntityRegistry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test grouping fails 
when entity removed.""" + await setup_platform(hass, config_entry, config) + + # Remove one of the players + entity_registry.async_remove("media_player.test_player_2") + + # Attempt to group + with pytest.raises( + HomeAssistantError, + match="The group member media_player.test_player_2 could not be resolved to a HEOS player.", + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_GROUP_MEMBERS: ["media_player.test_player_2"], + }, + blocking=True, + ) + controller.create_group.assert_not_called() From 02785a4ded3961e9b9c6a4862f0b1ae0c757a6b8 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 20 Dec 2024 23:37:16 -1000 Subject: [PATCH 554/677] Simplify query to find oldest state (#133700) --- homeassistant/components/recorder/queries.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/recorder/queries.py b/homeassistant/components/recorder/queries.py index 8ca7bef2691..34e9ec32f99 100644 --- a/homeassistant/components/recorder/queries.py +++ b/homeassistant/components/recorder/queries.py @@ -640,9 +640,9 @@ def find_states_to_purge( def find_oldest_state() -> StatementLambdaElement: """Find the last_updated_ts of the oldest state.""" return lambda_stmt( - lambda: select(States.last_updated_ts).where( - States.state_id.in_(select(func.min(States.state_id))) - ) + lambda: select(States.last_updated_ts) + .order_by(States.last_updated_ts.asc()) + .limit(1) ) From 43fab48d4e92c613570ed56e149236c962c93f8c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 20 Dec 2024 23:53:15 -1000 Subject: [PATCH 555/677] Improve purge performance for PostgreSQL with large databases (#133699) --- homeassistant/components/recorder/models/database.py | 4 ++++ homeassistant/components/recorder/purge.py | 4 ++++ homeassistant/components/recorder/queries.py | 4 ++-- homeassistant/components/recorder/util.py | 6 ++++++ tests/components/recorder/test_util.py | 2 +- 5 files changed, 17 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/recorder/models/database.py b/homeassistant/components/recorder/models/database.py index 94c5a7cc027..b86fd299793 100644 --- a/homeassistant/components/recorder/models/database.py +++ b/homeassistant/components/recorder/models/database.py @@ -32,4 +32,8 @@ class DatabaseOptimizer: # # https://jira.mariadb.org/browse/MDEV-25020 # + # PostgreSQL does not support a skip/loose index scan so its + # also slow for large distinct queries: + # https://wiki.postgresql.org/wiki/Loose_indexscan + # https://github.com/home-assistant/core/issues/126084 slow_range_in_select: bool diff --git a/homeassistant/components/recorder/purge.py b/homeassistant/components/recorder/purge.py index 11f5accc978..881952c390d 100644 --- a/homeassistant/components/recorder/purge.py +++ b/homeassistant/components/recorder/purge.py @@ -346,6 +346,10 @@ def _select_unused_attributes_ids( # We now break the query into groups of 100 and use a lambda_stmt to ensure # that the query is only cached once. # + # PostgreSQL also suffers from the same issue as older MariaDB with the distinct query + # when the database gets large because it doesn't support skip/loose index scan. 
+ # https://wiki.postgresql.org/wiki/Loose_indexscan + # https://github.com/home-assistant/core/issues/126084 groups = [iter(attributes_ids)] * 100 for attr_ids in zip_longest(*groups, fillvalue=None): seen_ids |= { diff --git a/homeassistant/components/recorder/queries.py b/homeassistant/components/recorder/queries.py index 34e9ec32f99..7ac4c19bc94 100644 --- a/homeassistant/components/recorder/queries.py +++ b/homeassistant/components/recorder/queries.py @@ -78,7 +78,7 @@ def find_states_metadata_ids(entity_ids: Iterable[str]) -> StatementLambdaElemen def _state_attrs_exist(attr: int | None) -> Select: """Check if a state attributes id exists in the states table.""" - return select(func.min(States.attributes_id)).where(States.attributes_id == attr) + return select(States.attributes_id).where(States.attributes_id == attr).limit(1) def attributes_ids_exist_in_states_with_fast_in_distinct( @@ -315,7 +315,7 @@ def data_ids_exist_in_events_with_fast_in_distinct( def _event_data_id_exist(data_id: int | None) -> Select: """Check if a event data id exists in the events table.""" - return select(func.min(Events.data_id)).where(Events.data_id == data_id) + return select(Events.data_id).where(Events.data_id == data_id).limit(1) def data_ids_exist_in_events( diff --git a/homeassistant/components/recorder/util.py b/homeassistant/components/recorder/util.py index ba4c5194689..4cf24eb79c5 100644 --- a/homeassistant/components/recorder/util.py +++ b/homeassistant/components/recorder/util.py @@ -600,6 +600,12 @@ def setup_connection_for_dialect( execute_on_connection(dbapi_connection, "SET time_zone = '+00:00'") elif dialect_name == SupportedDialect.POSTGRESQL: max_bind_vars = DEFAULT_MAX_BIND_VARS + # PostgreSQL does not support a skip/loose index scan so its + # also slow for large distinct queries: + # https://wiki.postgresql.org/wiki/Loose_indexscan + # https://github.com/home-assistant/core/issues/126084 + # so we set slow_range_in_select to True + slow_range_in_select = 
True if first_connection: # server_version_num was added in 2006 result = query_on_connection(dbapi_connection, "SHOW server_version") diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index 99bd5083489..aeeeba1865a 100644 --- a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -502,7 +502,7 @@ def test_supported_pgsql(caplog: pytest.LogCaptureFixture, pgsql_version) -> Non assert "minimum supported version" not in caplog.text assert database_engine is not None - assert database_engine.optimizer.slow_range_in_select is False + assert database_engine.optimizer.slow_range_in_select is True @pytest.mark.parametrize( From 4a063c3f9e8e5187245b1a5d38f9705e06496e21 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sat, 21 Dec 2024 01:54:13 -0800 Subject: [PATCH 556/677] Update the Google Tasks quality scale with documentation improvements (#133701) --- .../google_tasks/quality_scale.yaml | 22 ++++++++++--------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/google_tasks/quality_scale.yaml b/homeassistant/components/google_tasks/quality_scale.yaml index 79d216709e5..dd1cd67d8e2 100644 --- a/homeassistant/components/google_tasks/quality_scale.yaml +++ b/homeassistant/components/google_tasks/quality_scale.yaml @@ -19,7 +19,7 @@ rules: unique-config-entry: done entity-unique-id: done docs-installation-instructions: done - docs-removal-instructions: todo + docs-removal-instructions: done test-before-setup: done docs-high-level-description: done config-flow-test-coverage: done @@ -33,35 +33,37 @@ rules: config-entry-unloading: done reauthentication-flow: done action-exceptions: done - docs-installation-parameters: todo + docs-installation-parameters: done integration-owner: done parallel-updates: done test-coverage: done - docs-configuration-parameters: todo + docs-configuration-parameters: + status: exempt + comment: The integration does not have any 
configuration parameters. entity-unavailable: done # Gold - docs-examples: todo + docs-examples: done discovery-update-info: todo entity-device-class: todo entity-translations: todo - docs-data-update: todo + docs-data-update: done entity-disabled-by-default: todo discovery: todo exception-translations: todo devices: todo - docs-supported-devices: todo + docs-supported-devices: done icon-translations: todo - docs-known-limitations: todo + docs-known-limitations: done stale-devices: todo - docs-supported-functions: todo + docs-supported-functions: done repair-issues: todo reconfiguration-flow: todo entity-category: todo dynamic-devices: todo - docs-troubleshooting: todo + docs-troubleshooting: done diagnostics: todo - docs-use-cases: todo + docs-use-cases: done # Platinum async-dependency: todo From 859993e443f4956845dd03846f9e5673887724ee Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 10:55:00 +0100 Subject: [PATCH 557/677] Add update platform to Peblar Rocksolid EV Chargers integration (#133570) * Add update platform to Peblar Rocksolid EV Chargers integration * Use device class translations --- homeassistant/components/peblar/__init__.py | 55 ++++++++++- .../components/peblar/coordinator.py | 55 ++++++++++- homeassistant/components/peblar/entity.py | 26 ------ homeassistant/components/peblar/icons.json | 9 ++ homeassistant/components/peblar/sensor.py | 23 +++-- homeassistant/components/peblar/strings.json | 7 ++ homeassistant/components/peblar/update.py | 93 +++++++++++++++++++ 7 files changed, 229 insertions(+), 39 deletions(-) delete mode 100644 homeassistant/components/peblar/entity.py create mode 100644 homeassistant/components/peblar/icons.json create mode 100644 homeassistant/components/peblar/update.py diff --git a/homeassistant/components/peblar/__init__.py b/homeassistant/components/peblar/__init__.py index 559b124c772..d1da6ce83b7 100644 --- a/homeassistant/components/peblar/__init__.py +++ 
b/homeassistant/components/peblar/__init__.py @@ -2,6 +2,8 @@ from __future__ import annotations +import asyncio + from aiohttp import CookieJar from peblar import ( AccessMode, @@ -14,22 +16,34 @@ from peblar import ( from homeassistant.const import CONF_HOST, CONF_PASSWORD, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_create_clientsession -from .coordinator import PeblarConfigEntry, PeblarMeterDataUpdateCoordinator +from .const import DOMAIN +from .coordinator import ( + PeblarConfigEntry, + PeblarMeterDataUpdateCoordinator, + PeblarRuntimeData, + PeblarVersionDataUpdateCoordinator, +) -PLATFORMS = [Platform.SENSOR] +PLATFORMS = [ + Platform.SENSOR, + Platform.UPDATE, +] async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bool: """Set up Peblar from a config entry.""" + # Set up connection to the Peblar charger peblar = Peblar( host=entry.data[CONF_HOST], session=async_create_clientsession(hass, cookie_jar=CookieJar(unsafe=True)), ) try: await peblar.login(password=entry.data[CONF_PASSWORD]) + system_information = await peblar.system_information() api = await peblar.rest_api(enable=True, access_mode=AccessMode.READ_WRITE) except PeblarConnectionError as err: raise ConfigEntryNotReady("Could not connect to Peblar charger") from err @@ -40,10 +54,41 @@ async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bo "Unknown error occurred while connecting to Peblar charger" ) from err - coordinator = PeblarMeterDataUpdateCoordinator(hass, entry, api) - await coordinator.async_config_entry_first_refresh() + # Setup the data coordinators + meter_coordinator = PeblarMeterDataUpdateCoordinator(hass, entry, api) + version_coordinator = PeblarVersionDataUpdateCoordinator(hass, entry, peblar) + await asyncio.gather( + 
meter_coordinator.async_config_entry_first_refresh(), + version_coordinator.async_config_entry_first_refresh(), + ) - entry.runtime_data = coordinator + # Store the runtime data + entry.runtime_data = PeblarRuntimeData( + system_information=system_information, + meter_coordinator=meter_coordinator, + version_coordinator=version_coordinator, + ) + + # Peblar is a single device integration. Setting up the device directly + # during setup. This way we only have to reference it in all entities. + device_registry = dr.async_get(hass) + device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + configuration_url=f"http://{entry.data[CONF_HOST]}", + connections={ + (dr.CONNECTION_NETWORK_MAC, system_information.ethernet_mac_address), + (dr.CONNECTION_NETWORK_MAC, system_information.wlan_mac_address), + }, + identifiers={(DOMAIN, system_information.product_serial_number)}, + manufacturer=system_information.product_vendor_name, + model_id=system_information.product_number, + model=system_information.product_model_name, + name="Peblar EV Charger", + serial_number=system_information.product_serial_number, + sw_version=version_coordinator.data.current.firmware, + ) + + # Forward the setup to the platforms await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/peblar/coordinator.py b/homeassistant/components/peblar/coordinator.py index 8270905648f..f83ed8f4dda 100644 --- a/homeassistant/components/peblar/coordinator.py +++ b/homeassistant/components/peblar/coordinator.py @@ -1,16 +1,67 @@ """Data update coordinator for Peblar EV chargers.""" +from __future__ import annotations + +from dataclasses import dataclass from datetime import timedelta -from peblar import PeblarApi, PeblarError, PeblarMeter +from peblar import Peblar, PeblarApi, PeblarError, PeblarMeter, PeblarVersions from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from 
homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from tests.components.peblar.conftest import PeblarSystemInformation from .const import LOGGER -type PeblarConfigEntry = ConfigEntry[PeblarMeterDataUpdateCoordinator] + +@dataclass(kw_only=True) +class PeblarRuntimeData: + """Class to hold runtime data.""" + + system_information: PeblarSystemInformation + meter_coordinator: PeblarMeterDataUpdateCoordinator + version_coordinator: PeblarVersionDataUpdateCoordinator + + +type PeblarConfigEntry = ConfigEntry[PeblarRuntimeData] + + +@dataclass(kw_only=True, frozen=True) +class PeblarVersionInformation: + """Class to hold version information.""" + + current: PeblarVersions + available: PeblarVersions + + +class PeblarVersionDataUpdateCoordinator( + DataUpdateCoordinator[PeblarVersionInformation] +): + """Class to manage fetching Peblar version information.""" + + def __init__( + self, hass: HomeAssistant, entry: PeblarConfigEntry, peblar: Peblar + ) -> None: + """Initialize the coordinator.""" + self.peblar = peblar + super().__init__( + hass, + LOGGER, + config_entry=entry, + name=f"Peblar {entry.title} version", + update_interval=timedelta(hours=2), + ) + + async def _async_update_data(self) -> PeblarVersionInformation: + """Fetch data from the Peblar device.""" + try: + return PeblarVersionInformation( + current=await self.peblar.current_versions(), + available=await self.peblar.available_versions(), + ) + except PeblarError as err: + raise UpdateFailed(err) from err class PeblarMeterDataUpdateCoordinator(DataUpdateCoordinator[PeblarMeter]): diff --git a/homeassistant/components/peblar/entity.py b/homeassistant/components/peblar/entity.py deleted file mode 100644 index 6951cf6c21f..00000000000 --- a/homeassistant/components/peblar/entity.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Base entity for the Peblar integration.""" - -from __future__ import annotations - -from homeassistant.const import CONF_HOST -from 
homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity - -from .const import DOMAIN -from .coordinator import PeblarConfigEntry, PeblarMeterDataUpdateCoordinator - - -class PeblarEntity(CoordinatorEntity[PeblarMeterDataUpdateCoordinator]): - """Defines a Peblar entity.""" - - _attr_has_entity_name = True - - def __init__(self, entry: PeblarConfigEntry) -> None: - """Initialize the Peblar entity.""" - super().__init__(coordinator=entry.runtime_data) - self._attr_device_info = DeviceInfo( - configuration_url=f"http://{entry.data[CONF_HOST]}", - identifiers={(DOMAIN, str(entry.unique_id))}, - manufacturer="Peblar", - name="Peblar EV charger", - ) diff --git a/homeassistant/components/peblar/icons.json b/homeassistant/components/peblar/icons.json new file mode 100644 index 00000000000..073cd08a2c7 --- /dev/null +++ b/homeassistant/components/peblar/icons.json @@ -0,0 +1,9 @@ +{ + "entity": { + "update": { + "customization": { + "default": "mdi:palette" + } + } + } +} diff --git a/homeassistant/components/peblar/sensor.py b/homeassistant/components/peblar/sensor.py index eafca23e125..d31d929fcab 100644 --- a/homeassistant/components/peblar/sensor.py +++ b/homeassistant/components/peblar/sensor.py @@ -15,10 +15,12 @@ from homeassistant.components.sensor import ( ) from homeassistant.const import UnitOfEnergy from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .coordinator import PeblarConfigEntry -from .entity import PeblarEntity +from .const import DOMAIN +from .coordinator import PeblarConfigEntry, PeblarMeterDataUpdateCoordinator @dataclass(frozen=True, kw_only=True) @@ -28,7 +30,7 @@ class PeblarSensorDescription(SensorEntityDescription): value_fn: Callable[[PeblarMeter], int | None] -SENSORS: 
tuple[PeblarSensorDescription, ...] = ( +DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( PeblarSensorDescription( key="energy_total", device_class=SensorDeviceClass.ENERGY, @@ -48,24 +50,33 @@ async def async_setup_entry( ) -> None: """Set up Peblar sensors based on a config entry.""" async_add_entities( - PeblarSensorEntity(entry, description) for description in SENSORS + PeblarSensorEntity(entry, description) for description in DESCRIPTIONS ) -class PeblarSensorEntity(PeblarEntity, SensorEntity): +class PeblarSensorEntity( + CoordinatorEntity[PeblarMeterDataUpdateCoordinator], SensorEntity +): """Defines a Peblar sensor.""" entity_description: PeblarSensorDescription + _attr_has_entity_name = True + def __init__( self, entry: PeblarConfigEntry, description: PeblarSensorDescription, ) -> None: """Initialize the Peblar entity.""" - super().__init__(entry) + super().__init__(entry.runtime_data.meter_coordinator) self.entity_description = description self._attr_unique_id = f"{entry.unique_id}_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={ + (DOMAIN, entry.runtime_data.system_information.product_serial_number) + }, + ) @property def native_value(self) -> int | None: diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index e5fa1e85a6a..2e23fcfcdcd 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -31,5 +31,12 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "no_serial_number": "The discovered Peblar device did not provide a serial number." 
} + }, + "entity": { + "update": { + "customization": { + "name": "Customization" + } + } } } diff --git a/homeassistant/components/peblar/update.py b/homeassistant/components/peblar/update.py new file mode 100644 index 00000000000..cc0f1ee0c79 --- /dev/null +++ b/homeassistant/components/peblar/update.py @@ -0,0 +1,93 @@ +"""Support for Peblar updates.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from homeassistant.components.update import ( + UpdateDeviceClass, + UpdateEntity, + UpdateEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import ( + PeblarConfigEntry, + PeblarVersionDataUpdateCoordinator, + PeblarVersionInformation, +) + + +@dataclass(frozen=True, kw_only=True) +class PeblarUpdateEntityDescription(UpdateEntityDescription): + """Describe an Peblar update entity.""" + + installed_fn: Callable[[PeblarVersionInformation], str | None] + available_fn: Callable[[PeblarVersionInformation], str | None] + + +DESCRIPTIONS: tuple[PeblarUpdateEntityDescription, ...] 
= ( + PeblarUpdateEntityDescription( + key="firmware", + device_class=UpdateDeviceClass.FIRMWARE, + installed_fn=lambda x: x.current.firmware, + available_fn=lambda x: x.available.firmware, + ), + PeblarUpdateEntityDescription( + key="customization", + translation_key="customization", + installed_fn=lambda x: x.current.customization, + available_fn=lambda x: x.available.customization, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PeblarConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Peblar update based on a config entry.""" + async_add_entities( + PeblarUpdateEntity(entry, description) for description in DESCRIPTIONS + ) + + +class PeblarUpdateEntity( + CoordinatorEntity[PeblarVersionDataUpdateCoordinator], UpdateEntity +): + """Defines a Peblar update entity.""" + + entity_description: PeblarUpdateEntityDescription + + _attr_has_entity_name = True + + def __init__( + self, + entry: PeblarConfigEntry, + description: PeblarUpdateEntityDescription, + ) -> None: + """Initialize the update entity.""" + super().__init__(entry.runtime_data.version_coordinator) + self.entity_description = description + self._attr_unique_id = f"{entry.unique_id}_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={ + (DOMAIN, entry.runtime_data.system_information.product_serial_number) + }, + ) + + @property + def installed_version(self) -> str | None: + """Version currently installed and in use.""" + return self.entity_description.installed_fn(self.coordinator.data) + + @property + def latest_version(self) -> str | None: + """Latest version available for install.""" + return self.entity_description.available_fn(self.coordinator.data) From 4ee9f813aac9915f7982132cd7a470c06735f7a2 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sat, 21 Dec 2024 11:13:44 +0100 Subject: [PATCH 558/677] Fix inconsistent use of "pin" vs. 
"PIN" (#133685) --- homeassistant/components/frontier_silicon/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/frontier_silicon/strings.json b/homeassistant/components/frontier_silicon/strings.json index 03d9f28c016..d3e1cd84e4a 100644 --- a/homeassistant/components/frontier_silicon/strings.json +++ b/homeassistant/components/frontier_silicon/strings.json @@ -12,7 +12,7 @@ }, "device_config": { "title": "Device configuration", - "description": "The pin can be found via 'MENU button > Main Menu > System setting > Network > NetRemote PIN setup'", + "description": "The PIN can be found via 'MENU button > Main Menu > System setting > Network > NetRemote PIN setup'", "data": { "pin": "[%key:common::config_flow::data::pin%]" } From 7998a05742367e1186813d7402263209c2f9feb9 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sat, 21 Dec 2024 11:14:14 +0100 Subject: [PATCH 559/677] Replace lowercase "pin" in error message with the correct "PIN" (#133684) --- homeassistant/components/tessie/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/tessie/strings.json b/homeassistant/components/tessie/strings.json index 5b677594b42..4ac645a0270 100644 --- a/homeassistant/components/tessie/strings.json +++ b/homeassistant/components/tessie/strings.json @@ -521,7 +521,7 @@ "message": "{name} is already inactive." }, "incorrect_pin": { - "message": "Incorrect pin for {name}." + "message": "Incorrect PIN for {name}." 
}, "no_cable": { "message": "Insert cable to lock" From 4efcf18c70f7cfb43a61c7bb0f0abb6e812886b8 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sat, 21 Dec 2024 11:14:47 +0100 Subject: [PATCH 560/677] Change "pin" to "PIN" for consistency with common string (#133682) --- homeassistant/components/vulcan/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/vulcan/strings.json b/homeassistant/components/vulcan/strings.json index 814621b5403..61b5a954389 100644 --- a/homeassistant/components/vulcan/strings.json +++ b/homeassistant/components/vulcan/strings.json @@ -10,7 +10,7 @@ "unknown": "[%key:common::config_flow::error::unknown%]", "invalid_token": "[%key:common::config_flow::error::invalid_access_token%]", "expired_token": "Expired token - please generate a new token", - "invalid_pin": "Invalid pin", + "invalid_pin": "Invalid PIN", "invalid_symbol": "Invalid symbol", "expired_credentials": "Expired credentials - please create new on Vulcan mobile app registration page", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" From 989a3d1e24c54d0b173cb32a1dccefbecff30b5e Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sat, 21 Dec 2024 11:15:11 +0100 Subject: [PATCH 561/677] Change "pin" to correct "PIN" for consistent translations (#133681) --- homeassistant/components/ps4/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/ps4/strings.json b/homeassistant/components/ps4/strings.json index 6b1d4cd690b..778fa0215fb 100644 --- a/homeassistant/components/ps4/strings.json +++ b/homeassistant/components/ps4/strings.json @@ -21,7 +21,7 @@ "ip_address": "[%key:common::config_flow::data::ip%]" }, "data_description": { - "code": "On your PlayStation 4 console, go to **Settings**. Then, go to **Mobile App Connection Settings** and select **Add Device** to get the pin." + "code": "On your PlayStation 4 console, go to **Settings**. 
Then, go to **Mobile App Connection Settings** and select **Add Device** to get the PIN." } } }, From 4e316429d31df34d7e1c7eb69a368a4522fff315 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 21 Dec 2024 00:18:47 -1000 Subject: [PATCH 562/677] Handle WebsocketConnectionError during mqtt auto reconnect (#133697) followup to #133610 to handle the exception in the auto reconnect path as well fixes #132985 --- homeassistant/components/mqtt/client.py | 5 ++++- tests/components/mqtt/test_client.py | 10 +++++++++- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index 73c6b80cb14..6500c9f91c9 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -695,12 +695,15 @@ class MQTT: async def _reconnect_loop(self) -> None: """Reconnect to the MQTT server.""" + # pylint: disable-next=import-outside-toplevel + import paho.mqtt.client as mqtt + while True: if not self.connected: try: async with self._connection_lock, self._async_connect_in_executor(): await self.hass.async_add_executor_job(self._mqttc.reconnect) - except OSError as err: + except (OSError, mqtt.WebsocketConnectionError) as err: _LOGGER.debug( "Error re-connecting to MQTT server due to exception: %s", err ) diff --git a/tests/components/mqtt/test_client.py b/tests/components/mqtt/test_client.py index 1878045a9b9..1daad0e3914 100644 --- a/tests/components/mqtt/test_client.py +++ b/tests/components/mqtt/test_client.py @@ -1888,10 +1888,18 @@ async def test_mqtt_subscribes_and_unsubscribes_in_chunks( assert len(mqtt_client_mock.unsubscribe.mock_calls[1][1][0]) == 2 +@pytest.mark.parametrize( + "exception", + [ + OSError, + paho_mqtt.WebsocketConnectionError, + ], +) async def test_auto_reconnect( hass: HomeAssistant, setup_with_birth_msg_client_mock: MqttMockPahoClient, caplog: pytest.LogCaptureFixture, + exception: Exception, ) -> None: """Test reconnection is 
automatically done.""" mqtt_client_mock = setup_with_birth_msg_client_mock @@ -1902,7 +1910,7 @@ async def test_auto_reconnect( mqtt_client_mock.on_disconnect(None, None, 0) await hass.async_block_till_done() - mqtt_client_mock.reconnect.side_effect = OSError("foo") + mqtt_client_mock.reconnect.side_effect = exception("foo") async_fire_time_changed( hass, utcnow() + timedelta(seconds=RECONNECT_INTERVAL_SECONDS) ) From 78c9e4742846f7fe3be7d9b94465c8fb8006cbc6 Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Sat, 21 Dec 2024 11:20:46 +0100 Subject: [PATCH 563/677] Improve BMW config flow (#133705) --- .../bmw_connected_drive/config_flow.py | 21 ++++++++----------- 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/bmw_connected_drive/config_flow.py b/homeassistant/components/bmw_connected_drive/config_flow.py index 04fb3842dfa..5a067d23474 100644 --- a/homeassistant/components/bmw_connected_drive/config_flow.py +++ b/homeassistant/components/bmw_connected_drive/config_flow.py @@ -103,9 +103,10 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - data: dict[str, Any] = {} - - _existing_entry_data: Mapping[str, Any] | None = None + def __init__(self) -> None: + """Initialize the config flow.""" + self.data: dict[str, Any] = {} + self._existing_entry_data: dict[str, Any] = {} async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -175,19 +176,15 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Show the change password step.""" - existing_data = ( - dict(self._existing_entry_data) if self._existing_entry_data else {} - ) - if user_input is not None: - return await self.async_step_user(existing_data | user_input) + return await self.async_step_user(self._existing_entry_data | user_input) return self.async_show_form( step_id="change_password", data_schema=RECONFIGURE_SCHEMA, 
description_placeholders={ - CONF_USERNAME: existing_data[CONF_USERNAME], - CONF_REGION: existing_data[CONF_REGION], + CONF_USERNAME: self._existing_entry_data[CONF_USERNAME], + CONF_REGION: self._existing_entry_data[CONF_REGION], }, ) @@ -195,14 +192,14 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" - self._existing_entry_data = entry_data + self._existing_entry_data = dict(entry_data) return await self.async_step_change_password() async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a reconfiguration flow initialized by the user.""" - self._existing_entry_data = self._get_reconfigure_entry().data + self._existing_entry_data = dict(self._get_reconfigure_entry().data) return await self.async_step_change_password() async def async_step_captcha( From 66e863a2e3866a37f5776b1810cc56611194e931 Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Sat, 21 Dec 2024 11:29:24 +0100 Subject: [PATCH 564/677] Allow lamarzocco to reconnect websocket (#133635) --- homeassistant/components/lamarzocco/coordinator.py | 7 +++++-- tests/components/lamarzocco/test_init.py | 5 ++++- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lamarzocco/coordinator.py b/homeassistant/components/lamarzocco/coordinator.py index 0b07409adb5..2385039f53d 100644 --- a/homeassistant/components/lamarzocco/coordinator.py +++ b/homeassistant/components/lamarzocco/coordinator.py @@ -91,9 +91,11 @@ class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator): _scale_address: str | None = None - async def _async_setup(self) -> None: + async def _async_connect_websocket(self) -> None: """Set up the coordinator.""" - if self._local_client is not None: + if self._local_client is not None and ( + self._local_client.websocket is None or self._local_client.websocket.closed + ): _LOGGER.debug("Init WebSocket in 
background task") self.config_entry.async_create_background_task( @@ -123,6 +125,7 @@ class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator): """Fetch data from API endpoint.""" await self.device.get_config() _LOGGER.debug("Current status: %s", str(self.device.config)) + await self._async_connect_websocket() self._async_add_remove_scale() @callback diff --git a/tests/components/lamarzocco/test_init.py b/tests/components/lamarzocco/test_init.py index 7d90c049a3b..fccfcbeef13 100644 --- a/tests/components/lamarzocco/test_init.py +++ b/tests/components/lamarzocco/test_init.py @@ -199,8 +199,11 @@ async def test_websocket_closed_on_unload( ) as local_client: client = local_client.return_value client.websocket = AsyncMock() - client.websocket.closed = False + await async_init_integration(hass, mock_config_entry) + mock_lamarzocco.websocket_connect.assert_called_once() + + client.websocket.closed = False hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) await hass.async_block_till_done() client.websocket.close.assert_called_once() From 5c2d769b547bba14177156b2baebba137e2908b2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Fern=C3=A1ndez=20Rojas?= Date: Sat, 21 Dec 2024 11:30:46 +0100 Subject: [PATCH 565/677] Enable AEMET data cache (#131226) --- homeassistant/components/aemet/__init__.py | 14 +++++++++++++- tests/components/aemet/test_init.py | 8 ++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/aemet/__init__.py b/homeassistant/components/aemet/__init__.py index 9ec52faec00..79dc3cc55ce 100644 --- a/homeassistant/components/aemet/__init__.py +++ b/homeassistant/components/aemet/__init__.py @@ -1,6 +1,7 @@ """The AEMET OpenData component.""" import logging +import shutil from aemet_opendata.exceptions import AemetError, TownNotFound from aemet_opendata.interface import AEMET, ConnectionOptions, UpdateFeature @@ -10,8 +11,9 @@ from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CON 
from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import aiohttp_client +from homeassistant.helpers.storage import STORAGE_DIR -from .const import CONF_STATION_UPDATES, PLATFORMS +from .const import CONF_STATION_UPDATES, DOMAIN, PLATFORMS from .coordinator import AemetConfigEntry, AemetData, WeatherUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -29,6 +31,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> boo options = ConnectionOptions(api_key, update_features) aemet = AEMET(aiohttp_client.async_get_clientsession(hass), options) + aemet.set_api_data_dir(hass.config.path(STORAGE_DIR, f"{DOMAIN}-{entry.unique_id}")) + try: await aemet.select_coordinates(latitude, longitude) except TownNotFound as err: @@ -57,3 +61,11 @@ async def async_update_options(hass: HomeAssistant, entry: ConfigEntry) -> None: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Remove a config entry.""" + await hass.async_add_executor_job( + shutil.rmtree, + hass.config.path(STORAGE_DIR, f"{DOMAIN}-{entry.unique_id}"), + ) diff --git a/tests/components/aemet/test_init.py b/tests/components/aemet/test_init.py index cf3204782cd..d6229438582 100644 --- a/tests/components/aemet/test_init.py +++ b/tests/components/aemet/test_init.py @@ -9,6 +9,7 @@ from homeassistant.components.aemet.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from .util import mock_api_call @@ -24,6 +25,7 @@ CONFIG = { async def test_unload_entry( hass: HomeAssistant, + 
entity_registry: er.EntityRegistry, freezer: FrozenDateTimeFactory, ) -> None: """Test (un)loading the AEMET integration.""" @@ -47,6 +49,12 @@ async def test_unload_entry( await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.NOT_LOADED + assert await hass.config_entries.async_remove(config_entry.entry_id) + await hass.async_block_till_done() + + assert hass.states.get("weather.aemet") is None + assert entity_registry.async_get("weather.aemet") is None + async def test_init_town_not_found( hass: HomeAssistant, From 7326555f03fd4801536d381923d1681962f65218 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 11:38:33 +0100 Subject: [PATCH 566/677] Add diagnostic to Peblar Rocksolid EV Chargers integration (#133706) --- .../components/peblar/diagnostics.py | 23 +++++ .../components/peblar/quality_scale.yaml | 2 +- tests/components/peblar/conftest.py | 30 +++++- .../peblar/fixtures/available_versions.json | 4 + .../peblar/fixtures/current_versions.json | 4 + tests/components/peblar/fixtures/meter.json | 14 +++ .../peblar/snapshots/test_diagnostics.ambr | 93 +++++++++++++++++++ tests/components/peblar/test_diagnostics.py | 22 +++++ 8 files changed, 190 insertions(+), 2 deletions(-) create mode 100644 homeassistant/components/peblar/diagnostics.py create mode 100644 tests/components/peblar/fixtures/available_versions.json create mode 100644 tests/components/peblar/fixtures/current_versions.json create mode 100644 tests/components/peblar/fixtures/meter.json create mode 100644 tests/components/peblar/snapshots/test_diagnostics.ambr create mode 100644 tests/components/peblar/test_diagnostics.py diff --git a/homeassistant/components/peblar/diagnostics.py b/homeassistant/components/peblar/diagnostics.py new file mode 100644 index 00000000000..91cdb5dc811 --- /dev/null +++ b/homeassistant/components/peblar/diagnostics.py @@ -0,0 +1,23 @@ +"""Diagnostics support for Peblar.""" + +from __future__ import annotations + +from typing 
import Any + +from homeassistant.core import HomeAssistant + +from .coordinator import PeblarConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: PeblarConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + return { + "system_information": entry.runtime_data.system_information.to_dict(), + "meter": entry.runtime_data.meter_coordinator.data.to_dict(), + "versions": { + "available": entry.runtime_data.version_coordinator.data.available.to_dict(), + "current": entry.runtime_data.version_coordinator.data.current.to_dict(), + }, + } diff --git a/homeassistant/components/peblar/quality_scale.yaml b/homeassistant/components/peblar/quality_scale.yaml index 51bd60cc4b4..3dc470ce76b 100644 --- a/homeassistant/components/peblar/quality_scale.yaml +++ b/homeassistant/components/peblar/quality_scale.yaml @@ -40,7 +40,7 @@ rules: test-coverage: todo # Gold devices: todo - diagnostics: todo + diagnostics: done discovery-update-info: todo discovery: todo docs-data-update: todo diff --git a/tests/components/peblar/conftest.py b/tests/components/peblar/conftest.py index 583b2cbe7a5..ece9a8d9973 100644 --- a/tests/components/peblar/conftest.py +++ b/tests/components/peblar/conftest.py @@ -5,11 +5,12 @@ from __future__ import annotations from collections.abc import Generator from unittest.mock import MagicMock, patch -from peblar.models import PeblarSystemInformation +from peblar import PeblarMeter, PeblarSystemInformation, PeblarVersions import pytest from homeassistant.components.peblar.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_PASSWORD +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -43,7 +44,34 @@ def mock_peblar() -> Generator[MagicMock]: patch("homeassistant.components.peblar.config_flow.Peblar", new=peblar_mock), ): peblar = peblar_mock.return_value + peblar.available_versions.return_value = PeblarVersions.from_json( + 
load_fixture("available_versions.json", DOMAIN) + ) + peblar.current_versions.return_value = PeblarVersions.from_json( + load_fixture("current_versions.json", DOMAIN) + ) peblar.system_information.return_value = PeblarSystemInformation.from_json( load_fixture("system_information.json", DOMAIN) ) + + api = peblar.rest_api.return_value + api.meter.return_value = PeblarMeter.from_json( + load_fixture("meter.json", DOMAIN) + ) + yield peblar + + +@pytest.fixture +async def init_integration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_peblar: MagicMock, +) -> MockConfigEntry: + """Set up the Peblar integration for testing.""" + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + return mock_config_entry diff --git a/tests/components/peblar/fixtures/available_versions.json b/tests/components/peblar/fixtures/available_versions.json new file mode 100644 index 00000000000..45b3255167c --- /dev/null +++ b/tests/components/peblar/fixtures/available_versions.json @@ -0,0 +1,4 @@ +{ + "Customization": "Peblar-1.9", + "Firmware": "1.6.2+1+WL-1" +} diff --git a/tests/components/peblar/fixtures/current_versions.json b/tests/components/peblar/fixtures/current_versions.json new file mode 100644 index 00000000000..c54fb71c457 --- /dev/null +++ b/tests/components/peblar/fixtures/current_versions.json @@ -0,0 +1,4 @@ +{ + "Customization": "Peblar-1.9", + "Firmware": "1.6.1+1+WL-1" +} diff --git a/tests/components/peblar/fixtures/meter.json b/tests/components/peblar/fixtures/meter.json new file mode 100644 index 00000000000..1f32a3fbebc --- /dev/null +++ b/tests/components/peblar/fixtures/meter.json @@ -0,0 +1,14 @@ +{ + "CurrentPhase1": 0, + "CurrentPhase2": 0, + "CurrentPhase3": 0, + "EnergySession": 0, + "EnergyTotal": 880321, + "PowerPhase1": 0, + "PowerPhase2": 0, + "PowerPhase3": 0, + "PowerTotal": 0, + "VoltagePhase1": 230, + "VoltagePhase2": null, + 
"VoltagePhase3": null +} diff --git a/tests/components/peblar/snapshots/test_diagnostics.ambr b/tests/components/peblar/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..7701c1eb159 --- /dev/null +++ b/tests/components/peblar/snapshots/test_diagnostics.ambr @@ -0,0 +1,93 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'meter': dict({ + 'CurrentPhase1': 0, + 'CurrentPhase2': 0, + 'CurrentPhase3': 0, + 'EnergySession': 0, + 'EnergyTotal': 880321, + 'PowerPhase1': 0, + 'PowerPhase2': 0, + 'PowerPhase3': 0, + 'PowerTotal': 0, + 'VoltagePhase1': 230, + }), + 'system_information': dict({ + 'BopCalIGainA': 264625, + 'BopCalIGainB': 267139, + 'BopCalIGainC': 239155, + 'CanChangeChargingPhases': False, + 'CanChargeSinglePhase': True, + 'CanChargeThreePhases': False, + 'CustomerId': 'PBLR-0000645', + 'CustomerUpdatePackagePubKey': ''' + -----BEGIN PUBLIC KEY----- + lorem ipsum + -----END PUBLIC KEY----- + + ''', + 'EthMacAddr': '00:0F:11:58:86:97', + 'FwIdent': '1.6.1+1+WL-1', + 'Hostname': 'PBLR-0000645', + 'HwFixedCableRating': 20, + 'HwFwCompat': 'wlac-2', + 'HwHas4pRelay': False, + 'HwHasBop': True, + 'HwHasBuzzer': True, + 'HwHasDualSocket': False, + 'HwHasEichrechtLaserMarking': False, + 'HwHasEthernet': True, + 'HwHasLed': True, + 'HwHasLte': False, + 'HwHasMeter': True, + 'HwHasMeterDisplay': True, + 'HwHasPlc': False, + 'HwHasRfid': True, + 'HwHasRs485': True, + 'HwHasShutter': False, + 'HwHasSocket': False, + 'HwHasTpm': False, + 'HwHasWlan': True, + 'HwMaxCurrent': 16, + 'HwOneOrThreePhase': 3, + 'HwUKCompliant': False, + 'MainboardPn': '6004-2300-7600', + 'MainboardSn': '23-38-A4E-2MC', + 'MeterCalIGainA': 267369, + 'MeterCalIGainB': 228286, + 'MeterCalIGainC': 246455, + 'MeterCalIRmsOffsetA': 15573, + 'MeterCalIRmsOffsetB': 268422963, + 'MeterCalIRmsOffsetC': 9082, + 'MeterCalPhaseA': 250, + 'MeterCalPhaseB': 271, + 'MeterCalPhaseC': 271, + 'MeterCalVGainA': 250551, + 'MeterCalVGainB': 246074, + 'MeterCalVGainC': 230191, + 
'MeterFwIdent': 'b9cbcd', + 'NorFlash': 'True', + 'ProductModelName': 'WLAC1-H11R0WE0ICR00', + 'ProductPn': '6004-2300-8002', + 'ProductSn': '23-45-A4O-MOF', + 'ProductVendorName': 'Peblar', + 'WlanApMacAddr': '00:0F:11:58:86:98', + 'WlanStaMacAddr': '00:0F:11:58:86:99', + }), + 'versions': dict({ + 'available': dict({ + 'Customization': 'Peblar-1.9', + 'Firmware': '1.6.2+1+WL-1', + 'customization_version': '1.9', + 'firmware_version': '1.6.2', + }), + 'current': dict({ + 'Customization': 'Peblar-1.9', + 'Firmware': '1.6.1+1+WL-1', + 'customization_version': '1.9', + 'firmware_version': '1.6.1', + }), + }), + }) +# --- diff --git a/tests/components/peblar/test_diagnostics.py b/tests/components/peblar/test_diagnostics.py new file mode 100644 index 00000000000..11f9af28b2d --- /dev/null +++ b/tests/components/peblar/test_diagnostics.py @@ -0,0 +1,22 @@ +"""Tests for the diagnostics data provided by the Peblar integration.""" + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + init_integration: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, init_integration) + == snapshot + ) From aad1d6a25d0adb8d2170a225ac7683c8418fd0e9 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Sat, 21 Dec 2024 11:19:11 +0000 Subject: [PATCH 567/677] Use MAC address in Twinkly `DeviceInfo.connections` (#133708) --- homeassistant/components/twinkly/light.py | 4 +++- tests/components/twinkly/__init__.py | 2 ++ tests/components/twinkly/snapshots/test_diagnostics.ambr | 1 + 3 files changed, 6 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/twinkly/light.py 
b/homeassistant/components/twinkly/light.py index 6f6dffe63d2..771af2282dc 100644 --- a/homeassistant/components/twinkly/light.py +++ b/homeassistant/components/twinkly/light.py @@ -28,7 +28,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import ( @@ -97,6 +97,7 @@ class TwinklyLight(LightEntity): # They are expected to be updated using the device_info. self._name = conf.data[CONF_NAME] or "Twinkly light" self._model = conf.data[CONF_MODEL] + self._mac = device_info["mac"] self._client = client @@ -114,6 +115,7 @@ class TwinklyLight(LightEntity): """Get device specific attributes.""" return DeviceInfo( identifiers={(DOMAIN, self._attr_unique_id)}, + connections={(CONNECTION_NETWORK_MAC, self._mac)}, manufacturer="LEDWORKS", model=self._model, name=self._name, diff --git a/tests/components/twinkly/__init__.py b/tests/components/twinkly/__init__.py index f322004962a..192a5c0e220 100644 --- a/tests/components/twinkly/__init__.py +++ b/tests/components/twinkly/__init__.py @@ -7,6 +7,7 @@ from homeassistant.components.twinkly.const import DEV_NAME TEST_HOST = "test.twinkly.com" TEST_ID = "twinkly_test_device_id" TEST_UID = "4c8fccf5-e08a-4173-92d5-49bf479252a2" +TEST_MAC = "aa:bb:cc:dd:ee:ff" TEST_NAME = "twinkly_test_device_name" TEST_NAME_ORIGINAL = "twinkly_test_original_device_name" # the original (deprecated) name stored in the conf TEST_MODEL = "twinkly_test_device_model" @@ -31,6 +32,7 @@ class ClientMock: self.device_info = { "uuid": self.id, "device_name": TEST_NAME, + "mac": TEST_MAC, "product_code": TEST_MODEL, } diff --git a/tests/components/twinkly/snapshots/test_diagnostics.ambr b/tests/components/twinkly/snapshots/test_diagnostics.ambr 
index 28ec98cf572..4d25e222501 100644 --- a/tests/components/twinkly/snapshots/test_diagnostics.ambr +++ b/tests/components/twinkly/snapshots/test_diagnostics.ambr @@ -15,6 +15,7 @@ }), 'device_info': dict({ 'device_name': 'twinkly_test_device_name', + 'mac': '**REDACTED**', 'product_code': 'twinkly_test_device_model', 'uuid': '4c8fccf5-e08a-4173-92d5-49bf479252a2', }), From dbe04f17ad4156162fc5d1ec9716804b220b6484 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 12:20:03 +0100 Subject: [PATCH 568/677] Add sensors tests for Peblar Rocksolid EV Chargers (#133710) --- tests/components/peblar/conftest.py | 11 +++- .../peblar/snapshots/test_sensor.ambr | 58 +++++++++++++++++++ tests/components/peblar/test_sensor.py | 35 +++++++++++ 3 files changed, 102 insertions(+), 2 deletions(-) create mode 100644 tests/components/peblar/snapshots/test_sensor.ambr create mode 100644 tests/components/peblar/test_sensor.py diff --git a/tests/components/peblar/conftest.py b/tests/components/peblar/conftest.py index ece9a8d9973..2db28d3a7e6 100644 --- a/tests/components/peblar/conftest.py +++ b/tests/components/peblar/conftest.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Generator +from contextlib import nullcontext from unittest.mock import MagicMock, patch from peblar import PeblarMeter, PeblarSystemInformation, PeblarVersions @@ -67,11 +68,17 @@ async def init_integration( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_peblar: MagicMock, + request: pytest.FixtureRequest, ) -> MockConfigEntry: """Set up the Peblar integration for testing.""" mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + context = nullcontext() + if platform := getattr(request, "param", None): + context = patch("homeassistant.components.peblar.PLATFORMS", [platform]) + + with context: + await hass.config_entries.async_setup(mock_config_entry.entry_id) + 
await hass.async_block_till_done() return mock_config_entry diff --git a/tests/components/peblar/snapshots/test_sensor.ambr b/tests/components/peblar/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..29a5d7f7dd1 --- /dev/null +++ b/tests/components/peblar/snapshots/test_sensor.ambr @@ -0,0 +1,58 @@ +# serializer version: 1 +# name: test_entities[sensor][sensor.peblar_ev_charger_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.peblar_ev_charger_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '23-45-A4O-MOF_energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Peblar EV Charger Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '880.321', + }) +# --- diff --git a/tests/components/peblar/test_sensor.py b/tests/components/peblar/test_sensor.py new file mode 100644 index 00000000000..e2a49942cd5 --- /dev/null +++ b/tests/components/peblar/test_sensor.py @@ -0,0 +1,35 @@ +"""Tests for the Peblar sensor platform.""" + +import pytest +from syrupy.assertion import 
SnapshotAssertion + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize("init_integration", [Platform.SENSOR], indirect=True) +@pytest.mark.usefixtures("init_integration") +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the sensor entities.""" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + # Ensure all entities are correctly assigned to the Peblar device + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, "23-45-A4O-MOF")} + ) + assert device_entry + entity_entries = er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + for entity_entry in entity_entries: + assert entity_entry.device_id == device_entry.id From b106b88f5c5aeb5444a5ea180e4786ae793bae22 Mon Sep 17 00:00:00 2001 From: Manu <4445816+tr4nt0r@users.noreply.github.com> Date: Sat, 21 Dec 2024 12:21:11 +0100 Subject: [PATCH 569/677] Adjust freezer tick in settings tests of IronOS integration (#133707) --- tests/components/iron_os/test_init.py | 4 ++-- tests/components/iron_os/test_number.py | 2 +- tests/components/iron_os/test_select.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/components/iron_os/test_init.py b/tests/components/iron_os/test_init.py index 15327c55121..4749e1b6199 100644 --- a/tests/components/iron_os/test_init.py +++ b/tests/components/iron_os/test_init.py @@ -61,7 +61,7 @@ async def test_setup_config_entry_not_ready( config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) await 
hass.async_block_till_done() - freezer.tick(timedelta(seconds=60)) + freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() @@ -81,7 +81,7 @@ async def test_settings_exception( config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - freezer.tick(timedelta(seconds=60)) + freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() diff --git a/tests/components/iron_os/test_number.py b/tests/components/iron_os/test_number.py index e0617a5012f..088b66feb64 100644 --- a/tests/components/iron_os/test_number.py +++ b/tests/components/iron_os/test_number.py @@ -50,7 +50,7 @@ async def test_state( assert config_entry.state is ConfigEntryState.LOADED - freezer.tick(timedelta(seconds=60)) + freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) diff --git a/tests/components/iron_os/test_select.py b/tests/components/iron_os/test_select.py index 5e981e1618e..cfd4d8ecbb1 100644 --- a/tests/components/iron_os/test_select.py +++ b/tests/components/iron_os/test_select.py @@ -61,7 +61,7 @@ async def test_state( assert config_entry.state is ConfigEntryState.LOADED - freezer.tick(timedelta(seconds=60)) + freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) From 6314d7a44c3fa37106fbfbabd85b2e3580d6e840 Mon Sep 17 00:00:00 2001 From: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com> Date: Sat, 21 Dec 2024 12:31:17 +0100 Subject: [PATCH 570/677] Fix section translations check (#133683) --- tests/components/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/components/conftest.py b/tests/components/conftest.py index e95147b8664..534c471bf83 
100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -684,7 +684,7 @@ async def _check_step_or_section_translations( description_placeholders, data_value.schema, ) - return + continue iqs_config_flow = _get_integration_quality_scale_rule( integration, "config-flow" ) From 5665abf99168f2e4951d8093342b570cc2a4bc8b Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Sat, 21 Dec 2024 11:31:40 +0000 Subject: [PATCH 571/677] Store Twinkly runtime data in config entry (#133714) --- homeassistant/components/twinkly/__init__.py | 42 +++++++++++-------- homeassistant/components/twinkly/const.py | 3 -- .../components/twinkly/diagnostics.py | 10 ++--- homeassistant/components/twinkly/light.py | 40 ++++++------------ 4 files changed, 41 insertions(+), 54 deletions(-) diff --git a/homeassistant/components/twinkly/__init__.py b/homeassistant/components/twinkly/__init__.py index b09e58ff12f..00e40d604c0 100644 --- a/homeassistant/components/twinkly/__init__.py +++ b/homeassistant/components/twinkly/__init__.py @@ -1,29 +1,40 @@ """The twinkly component.""" +from dataclasses import dataclass +from typing import Any + from aiohttp import ClientError from ttls.client import Twinkly from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_SW_VERSION, CONF_HOST, Platform +from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import ATTR_VERSION, DATA_CLIENT, DATA_DEVICE_INFO, DOMAIN +from .const import ATTR_VERSION PLATFORMS = [Platform.LIGHT] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Set up entries from config flow.""" - hass.data.setdefault(DOMAIN, {}) +@dataclass +class TwinklyData: + """Data for Twinkly integration.""" + client: Twinkly + device_info: dict[str, Any] + sw_version: str | None + + 
+type TwinklyConfigEntry = ConfigEntry[TwinklyData] + + +async def async_setup_entry(hass: HomeAssistant, entry: TwinklyConfigEntry) -> bool: + """Set up entries from config flow.""" # We setup the client here so if at some point we add any other entity for this device, # we will be able to properly share the connection. host = entry.data[CONF_HOST] - hass.data[DOMAIN].setdefault(entry.entry_id, {}) - client = Twinkly(host, async_get_clientsession(hass)) try: @@ -32,21 +43,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except (TimeoutError, ClientError) as exception: raise ConfigEntryNotReady from exception - hass.data[DOMAIN][entry.entry_id] = { - DATA_CLIENT: client, - DATA_DEVICE_INFO: device_info, - ATTR_SW_VERSION: software_version.get(ATTR_VERSION), - } + entry.runtime_data = TwinklyData( + client, device_info, software_version.get(ATTR_VERSION) + ) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: TwinklyConfigEntry) -> bool: """Remove a twinkly entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/twinkly/const.py b/homeassistant/components/twinkly/const.py index f33024ed156..488b213b895 100644 --- a/homeassistant/components/twinkly/const.py +++ b/homeassistant/components/twinkly/const.py @@ -15,8 +15,5 @@ DEV_LED_PROFILE = "led_profile" DEV_PROFILE_RGB = "RGB" DEV_PROFILE_RGBW = "RGBW" -DATA_CLIENT = "client" -DATA_DEVICE_INFO = "device_info" - # Minimum version required to support effects MIN_EFFECT_VERSION = "2.7.1" diff --git a/homeassistant/components/twinkly/diagnostics.py b/homeassistant/components/twinkly/diagnostics.py 
index e188e92ecd5..9ddc65cf255 100644 --- a/homeassistant/components/twinkly/diagnostics.py +++ b/homeassistant/components/twinkly/diagnostics.py @@ -6,18 +6,18 @@ from typing import Any from homeassistant.components.diagnostics import async_redact_data from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_SW_VERSION, CONF_HOST, CONF_IP_ADDRESS, CONF_MAC from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .const import DATA_DEVICE_INFO, DOMAIN +from . import TwinklyConfigEntry +from .const import DOMAIN TO_REDACT = [CONF_HOST, CONF_IP_ADDRESS, CONF_MAC] async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: TwinklyConfigEntry ) -> dict[str, Any]: """Return diagnostics for a Twinkly config entry.""" attributes = None @@ -34,8 +34,8 @@ async def async_get_config_entry_diagnostics( return async_redact_data( { "entry": entry.as_dict(), - "device_info": hass.data[DOMAIN][entry.entry_id][DATA_DEVICE_INFO], - ATTR_SW_VERSION: hass.data[DOMAIN][entry.entry_id][ATTR_SW_VERSION], + "device_info": entry.runtime_data.device_info, + ATTR_SW_VERSION: entry.runtime_data.sw_version, "attributes": attributes, }, TO_REDACT, diff --git a/homeassistant/components/twinkly/light.py b/homeassistant/components/twinkly/light.py index 771af2282dc..d05da7bab15 100644 --- a/homeassistant/components/twinkly/light.py +++ b/homeassistant/components/twinkly/light.py @@ -7,7 +7,6 @@ from typing import Any from aiohttp import ClientError from awesomeversion import AwesomeVersion -from ttls.client import Twinkly from homeassistant.components.light import ( ATTR_BRIGHTNESS, @@ -18,22 +17,14 @@ from homeassistant.components.light import ( LightEntity, LightEntityFeature, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_SW_VERSION, - 
CONF_HOST, - CONF_ID, - CONF_MODEL, - CONF_NAME, -) +from homeassistant.const import CONF_HOST, CONF_ID, CONF_MODEL, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import TwinklyConfigEntry from .const import ( - DATA_CLIENT, - DATA_DEVICE_INFO, DEV_LED_PROFILE, DEV_MODEL, DEV_NAME, @@ -48,16 +39,11 @@ _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: TwinklyConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Setups an entity from a config entry (UI config flow).""" - - client = hass.data[DOMAIN][config_entry.entry_id][DATA_CLIENT] - device_info = hass.data[DOMAIN][config_entry.entry_id][DATA_DEVICE_INFO] - software_version = hass.data[DOMAIN][config_entry.entry_id][ATTR_SW_VERSION] - - entity = TwinklyLight(config_entry, client, device_info, software_version) + entity = TwinklyLight(config_entry) async_add_entities([entity], update_before_add=True) @@ -71,14 +57,12 @@ class TwinklyLight(LightEntity): def __init__( self, - conf: ConfigEntry, - client: Twinkly, - device_info, - software_version: str | None = None, + entry: TwinklyConfigEntry, ) -> None: """Initialize a TwinklyLight entity.""" - self._attr_unique_id: str = conf.data[CONF_ID] - self._conf = conf + self._attr_unique_id: str = entry.data[CONF_ID] + device_info = entry.runtime_data.device_info + self._conf = entry if device_info.get(DEV_LED_PROFILE) == DEV_PROFILE_RGBW: self._attr_supported_color_modes = {ColorMode.RGBW} @@ -95,18 +79,18 @@ class TwinklyLight(LightEntity): # Those are saved in the config entry in order to have meaningful values even # if the device is currently offline. # They are expected to be updated using the device_info. 
- self._name = conf.data[CONF_NAME] or "Twinkly light" - self._model = conf.data[CONF_MODEL] + self._name = entry.data[CONF_NAME] or "Twinkly light" + self._model = entry.data[CONF_MODEL] self._mac = device_info["mac"] - self._client = client + self._client = entry.runtime_data.client # Set default state before any update self._attr_is_on = False self._attr_available = False self._current_movie: dict[Any, Any] = {} self._movies: list[Any] = [] - self._software_version = software_version + self._software_version = entry.runtime_data.sw_version # We guess that most devices are "new" and support effects self._attr_supported_features = LightEntityFeature.EFFECT From 7e24b353ac925be1ff3fd05e4f46ae4f983964ab Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 12:31:58 +0100 Subject: [PATCH 572/677] Add updates tests for Peblar Rocksolid EV Chargers (#133712) --- .../peblar/snapshots/test_update.ambr | 118 ++++++++++++++++++ tests/components/peblar/test_update.py | 35 ++++++ 2 files changed, 153 insertions(+) create mode 100644 tests/components/peblar/snapshots/test_update.ambr create mode 100644 tests/components/peblar/test_update.py diff --git a/tests/components/peblar/snapshots/test_update.ambr b/tests/components/peblar/snapshots/test_update.ambr new file mode 100644 index 00000000000..de8bb63150d --- /dev/null +++ b/tests/components/peblar/snapshots/test_update.ambr @@ -0,0 +1,118 @@ +# serializer version: 1 +# name: test_entities[update][update.peblar_ev_charger_customization-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.peblar_ev_charger_customization', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 
'original_icon': None, + 'original_name': 'Customization', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'customization', + 'unique_id': '23-45-A4O-MOF_customization', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[update][update.peblar_ev_charger_customization-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/peblar/icon.png', + 'friendly_name': 'Peblar EV Charger Customization', + 'in_progress': False, + 'installed_version': 'Peblar-1.9', + 'latest_version': 'Peblar-1.9', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.peblar_ev_charger_customization', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entities[update][update.peblar_ev_charger_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.peblar_ev_charger_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '23-45-A4O-MOF_firmware', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[update][update.peblar_ev_charger_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 
'https://brands.home-assistant.io/_/peblar/icon.png', + 'friendly_name': 'Peblar EV Charger Firmware', + 'in_progress': False, + 'installed_version': '1.6.1+1+WL-1', + 'latest_version': '1.6.2+1+WL-1', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.peblar_ev_charger_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/peblar/test_update.py b/tests/components/peblar/test_update.py new file mode 100644 index 00000000000..7a772fbe96c --- /dev/null +++ b/tests/components/peblar/test_update.py @@ -0,0 +1,35 @@ +"""Tests for the Peblar update platform.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize("init_integration", [Platform.UPDATE], indirect=True) +@pytest.mark.usefixtures("init_integration") +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the update entities.""" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + # Ensure all entities are correctly assigned to the Peblar device + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, "23-45-A4O-MOF")} + ) + assert device_entry + entity_entries = er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + for entity_entry in entity_entries: + assert entity_entry.device_id == device_entry.id From 
6b666b3a0f4bd3e2ac4365f021954ec9b551b956 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Sat, 21 Dec 2024 12:44:00 +0100 Subject: [PATCH 573/677] Test color_temp updates are processed when an mqtt json light is turned off (#133715) --- tests/components/mqtt/test_light_json.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index 988cce85653..c127c86de39 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -727,12 +727,12 @@ async def test_controlling_state_via_topic( async_fire_mqtt_message(hass, "test_light_rgb", '{"state":"ON", "color_temp":155}') light_state = hass.states.get("light.test") - assert light_state.attributes.get("color_temp") == 155 + assert light_state.attributes.get("color_temp_kelvin") == 6451 # 155 mired async_fire_mqtt_message(hass, "test_light_rgb", '{"state":"ON", "color_temp":null}') light_state = hass.states.get("light.test") - assert light_state.attributes.get("color_temp") is None + assert light_state.attributes.get("color_temp_kelvin") is None async_fire_mqtt_message( hass, "test_light_rgb", '{"state":"ON", "effect":"colorloop"}' @@ -763,11 +763,26 @@ async def test_controlling_state_via_topic( assert light_state.state == STATE_OFF assert light_state.attributes.get("brightness") is None + # Simulate the lights color temp has been changed + # while it was switched off + async_fire_mqtt_message( + hass, + "test_light_rgb", + '{"state":"OFF","color_temp":201}', + ) + light_state = hass.states.get("light.test") + assert light_state.state == STATE_OFF + # Color temp attribute is not exposed while the lamp is off + assert light_state.attributes.get("color_temp_kelvin") is None + # test previous zero brightness received was ignored and brightness is restored + # see if the latest color_temp value received is restored async_fire_mqtt_message(hass, "test_light_rgb", 
'{"state":"ON"}') light_state = hass.states.get("light.test") assert light_state.attributes.get("brightness") == 128 + assert light_state.attributes.get("color_temp_kelvin") == 4975 # 201 mired + # A `0` brightness value is ignored when a light is turned on async_fire_mqtt_message(hass, "test_light_rgb", '{"state":"ON","brightness":0}') light_state = hass.states.get("light.test") assert light_state.attributes.get("brightness") == 128 From 11efec49dbdcbd572002dfe9e2c5f37893fca0d4 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sat, 21 Dec 2024 12:45:00 +0100 Subject: [PATCH 574/677] Fix test coverage in workday (#133616) --- .../components/workday/binary_sensor.py | 6 ++- .../components/workday/config_flow.py | 8 ++-- tests/components/workday/test_config_flow.py | 45 +++++++++++++++++++ 3 files changed, 55 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/workday/binary_sensor.py b/homeassistant/components/workday/binary_sensor.py index f4a2541a1d7..3684208f102 100644 --- a/homeassistant/components/workday/binary_sensor.py +++ b/homeassistant/components/workday/binary_sensor.py @@ -94,7 +94,11 @@ def _get_obj_holidays( language=language, categories=set_categories, ) - if (supported_languages := obj_holidays.supported_languages) and language == "en": + if ( + (supported_languages := obj_holidays.supported_languages) + and language + and language.startswith("en") + ): for lang in supported_languages: if lang.startswith("en"): obj_holidays = country_holidays( diff --git a/homeassistant/components/workday/config_flow.py b/homeassistant/components/workday/config_flow.py index 2036d685d31..895c7cd50e2 100644 --- a/homeassistant/components/workday/config_flow.py +++ b/homeassistant/components/workday/config_flow.py @@ -136,7 +136,7 @@ def validate_custom_dates(user_input: dict[str, Any]) -> None: year: int = dt_util.now().year if country := user_input.get(CONF_COUNTRY): - language = user_input.get(CONF_LANGUAGE) + language: str | None = 
user_input.get(CONF_LANGUAGE) province = user_input.get(CONF_PROVINCE) obj_holidays = country_holidays( country=country, @@ -145,8 +145,10 @@ def validate_custom_dates(user_input: dict[str, Any]) -> None: language=language, ) if ( - supported_languages := obj_holidays.supported_languages - ) and language == "en": + (supported_languages := obj_holidays.supported_languages) + and language + and language.startswith("en") + ): for lang in supported_languages: if lang.startswith("en"): obj_holidays = country_holidays( diff --git a/tests/components/workday/test_config_flow.py b/tests/components/workday/test_config_flow.py index 1bf0f176fe9..51d4b899d25 100644 --- a/tests/components/workday/test_config_flow.py +++ b/tests/components/workday/test_config_flow.py @@ -653,3 +653,48 @@ async def test_form_with_categories(hass: HomeAssistant) -> None: "language": "de", "category": ["half_day"], } + + +async def test_options_form_removes_subdiv(hass: HomeAssistant) -> None: + """Test we get the form in options when removing a configured subdivision.""" + + entry = await init_integration( + hass, + { + "name": "Workday Sensor", + "country": "DE", + "excludes": ["sat", "sun", "holiday"], + "days_offset": 0, + "workdays": ["mon", "tue", "wed", "thu", "fri"], + "add_holidays": [], + "remove_holidays": [], + "language": "de", + "province": "BW", + }, + ) + + result = await hass.config_entries.options.async_init(entry.entry_id) + + result2 = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + "excludes": ["sat", "sun", "holiday"], + "days_offset": 0, + "workdays": ["mon", "tue", "wed", "thu", "fri"], + "add_holidays": [], + "remove_holidays": [], + "language": "de", + }, + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["data"] == { + "name": "Workday Sensor", + "country": "DE", + "excludes": ["sat", "sun", "holiday"], + "days_offset": 0, + "workdays": ["mon", "tue", "wed", "thu", "fri"], + "add_holidays": [], + 
"remove_holidays": [], + "language": "de", + } From a3fab094c3448030514ab3c9a0783ec13b38eede Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 13:03:44 +0100 Subject: [PATCH 575/677] Add device test for Peblar Rocksolid EV Chargers (#133713) --- .../peblar/snapshots/test_init.ambr | 41 +++++++++++++++++++ tests/components/peblar/test_init.py | 16 ++++++++ 2 files changed, 57 insertions(+) create mode 100644 tests/components/peblar/snapshots/test_init.ambr diff --git a/tests/components/peblar/snapshots/test_init.ambr b/tests/components/peblar/snapshots/test_init.ambr new file mode 100644 index 00000000000..ba79093b3ec --- /dev/null +++ b/tests/components/peblar/snapshots/test_init.ambr @@ -0,0 +1,41 @@ +# serializer version: 1 +# name: test_peblar_device_entry + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://127.0.0.127', + 'connections': set({ + tuple( + 'mac', + '00:0f:11:58:86:97', + ), + tuple( + 'mac', + '00:0f:11:58:86:99', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'peblar', + '23-45-A4O-MOF', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Peblar', + 'model': 'WLAC1-H11R0WE0ICR00', + 'model_id': '6004-2300-8002', + 'name': 'Peblar EV Charger', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '23-45-A4O-MOF', + 'suggested_area': None, + 'sw_version': '1.6.1+1+WL-1', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/peblar/test_init.py b/tests/components/peblar/test_init.py index 78508501ba8..ca7b0d88c24 100644 --- a/tests/components/peblar/test_init.py +++ b/tests/components/peblar/test_init.py @@ -4,10 +4,12 @@ from unittest.mock import MagicMock from peblar import PeblarAuthenticationError, PeblarConnectionError, PeblarError import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.components.peblar.const import DOMAIN 
from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr from tests.common import MockConfigEntry @@ -67,3 +69,17 @@ async def test_config_entry_authentication_failed( await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + +@pytest.mark.usefixtures("init_integration") +async def test_peblar_device_entry( + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Peblar device registry entry matches the snapshot.""" + assert ( + device_entry := device_registry.async_get_device( + identifiers={(DOMAIN, "23-45-A4O-MOF")} + ) + ) + assert device_entry == snapshot From a3fad89d0dbb87deef66924c3d4f50775e6e8e36 Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Sat, 21 Dec 2024 13:19:04 +0100 Subject: [PATCH 576/677] Use super constructor self.config_entry in enphase_envoy coordinator (#133718) --- homeassistant/components/enphase_envoy/coordinator.py | 10 +++++----- .../components/enphase_envoy/quality_scale.yaml | 7 +------ 2 files changed, 6 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/enphase_envoy/coordinator.py b/homeassistant/components/enphase_envoy/coordinator.py index 386661402de..67f43ca64a8 100644 --- a/homeassistant/components/enphase_envoy/coordinator.py +++ b/homeassistant/components/enphase_envoy/coordinator.py @@ -37,6 +37,7 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): envoy_serial_number: str envoy_firmware: str + config_entry: EnphaseConfigEntry def __init__( self, hass: HomeAssistant, envoy: Envoy, entry: EnphaseConfigEntry ) -> None: """Initialize DataUpdateCoordinator for the envoy.""" self.envoy = envoy entry_data = entry.data - self.entry = entry self.username = entry_data[CONF_USERNAME] self.password = 
entry_data[CONF_PASSWORD] self._setup_complete = False @@ -107,7 +107,7 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): await envoy.setup() assert envoy.serial_number is not None self.envoy_serial_number = envoy.serial_number - if token := self.entry.data.get(CONF_TOKEN): + if token := self.config_entry.data.get(CONF_TOKEN): with contextlib.suppress(*INVALID_AUTH_ERRORS): # Always set the username and password # so we can refresh the token if needed @@ -136,9 +136,9 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): # as long as the token is valid _LOGGER.debug("%s: Updating token in config entry from auth", self.name) self.hass.config_entries.async_update_entry( - self.entry, + self.config_entry, data={ - **self.entry.data, + **self.config_entry.data, CONF_TOKEN: envoy.auth.token, }, ) @@ -189,7 +189,7 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): ) # reload the integration to get all established again self.hass.async_create_task( - self.hass.config_entries.async_reload(self.entry.entry_id) + self.hass.config_entries.async_reload(self.config_entry.entry_id) ) # remember firmware version for next time self.envoy_firmware = envoy.firmware diff --git a/homeassistant/components/enphase_envoy/quality_scale.yaml b/homeassistant/components/enphase_envoy/quality_scale.yaml index 4b83c2886f7..8e096538f01 100644 --- a/homeassistant/components/enphase_envoy/quality_scale.yaml +++ b/homeassistant/components/enphase_envoy/quality_scale.yaml @@ -7,12 +7,7 @@ rules: status: done comment: fixed 1 minute cycle based on Enphase Envoy device characteristics brands: done - common-modules: - status: done - comment: | - In coordinator.py, you set self.entry = entry, while after the super constructor, - you can access the entry via self.config_entry (you would have to overwrite the - type to make sure you don't have to assert not None every time)done + common-modules: done config-flow-test-coverage: status: 
todo comment: | From dc9133f919dfbe76fe874625cc18dd2078d373b3 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sat, 21 Dec 2024 13:26:09 +0100 Subject: [PATCH 577/677] Use mac address in Twinkly for unique id (#133717) --- homeassistant/components/twinkly/__init__.py | 42 +++++++++++++- .../components/twinkly/config_flow.py | 5 +- homeassistant/components/twinkly/light.py | 4 +- .../twinkly/snapshots/test_diagnostics.ambr | 4 +- tests/components/twinkly/test_init.py | 58 +++++++++++++++++-- tests/components/twinkly/test_light.py | 18 +++--- 6 files changed, 110 insertions(+), 21 deletions(-) diff --git a/homeassistant/components/twinkly/__init__.py b/homeassistant/components/twinkly/__init__.py index 00e40d604c0..cd76a79e1d7 100644 --- a/homeassistant/components/twinkly/__init__.py +++ b/homeassistant/components/twinkly/__init__.py @@ -1,6 +1,7 @@ """The twinkly component.""" from dataclasses import dataclass +import logging from typing import Any from aiohttp import ClientError @@ -10,12 +11,15 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import ATTR_VERSION +from .const import ATTR_VERSION, DOMAIN PLATFORMS = [Platform.LIGHT] +_LOGGER = logging.getLogger(__name__) + @dataclass class TwinklyData: @@ -56,3 +60,39 @@ async def async_unload_entry(hass: HomeAssistant, entry: TwinklyConfigEntry) -> """Remove a twinkly entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def async_migrate_entry(hass: HomeAssistant, entry: TwinklyConfigEntry) -> bool: + """Migrate old entry.""" + if entry.minor_version == 1: + client = Twinkly(entry.data[CONF_HOST], async_get_clientsession(hass)) + try: + 
device_info = await client.get_details() + except (TimeoutError, ClientError) as exception: + _LOGGER.error("Error while migrating: %s", exception) + return False + identifier = entry.unique_id + assert identifier is not None + entity_registry = er.async_get(hass) + entity_id = entity_registry.async_get_entity_id("light", DOMAIN, identifier) + if entity_id: + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None + entity_registry.async_update_entity( + entity_entry.entity_id, new_unique_id=device_info["mac"] + ) + device_registry = dr.async_get(hass) + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, identifier)} + ) + if device_entry: + device_registry.async_update_device( + device_entry.id, new_identifiers={(DOMAIN, device_info["mac"])} + ) + hass.config_entries.async_update_entry( + entry, + unique_id=device_info["mac"], + minor_version=2, + ) + + return True diff --git a/homeassistant/components/twinkly/config_flow.py b/homeassistant/components/twinkly/config_flow.py index 837bd9ccb6a..4dec8809f07 100644 --- a/homeassistant/components/twinkly/config_flow.py +++ b/homeassistant/components/twinkly/config_flow.py @@ -23,6 +23,7 @@ class TwinklyConfigFlow(ConfigFlow, domain=DOMAIN): """Handle twinkly config flow.""" VERSION = 1 + MINOR_VERSION = 2 def __init__(self) -> None: """Initialize the config flow.""" @@ -46,7 +47,7 @@ class TwinklyConfigFlow(ConfigFlow, domain=DOMAIN): errors[CONF_HOST] = "cannot_connect" else: await self.async_set_unique_id( - device_info[DEV_ID], raise_on_progress=False + device_info["mac"], raise_on_progress=False ) self._abort_if_unique_id_configured() @@ -64,7 +65,7 @@ class TwinklyConfigFlow(ConfigFlow, domain=DOMAIN): device_info = await Twinkly( discovery_info.ip, async_get_clientsession(self.hass) ).get_details() - await self.async_set_unique_id(device_info[DEV_ID]) + await self.async_set_unique_id(device_info["mac"]) self._abort_if_unique_id_configured(updates={CONF_HOST: 
discovery_info.ip}) self._discovered_device = (device_info, discovery_info.ip) diff --git a/homeassistant/components/twinkly/light.py b/homeassistant/components/twinkly/light.py index d05da7bab15..7de07db3b30 100644 --- a/homeassistant/components/twinkly/light.py +++ b/homeassistant/components/twinkly/light.py @@ -60,8 +60,8 @@ class TwinklyLight(LightEntity): entry: TwinklyConfigEntry, ) -> None: """Initialize a TwinklyLight entity.""" - self._attr_unique_id: str = entry.data[CONF_ID] device_info = entry.runtime_data.device_info + self._attr_unique_id: str = device_info["mac"] self._conf = entry if device_info.get(DEV_LED_PROFILE) == DEV_PROFILE_RGBW: @@ -98,7 +98,7 @@ class TwinklyLight(LightEntity): def device_info(self) -> DeviceInfo | None: """Get device specific attributes.""" return DeviceInfo( - identifiers={(DOMAIN, self._attr_unique_id)}, + identifiers={(DOMAIN, self._mac)}, connections={(CONNECTION_NETWORK_MAC, self._mac)}, manufacturer="LEDWORKS", model=self._model, diff --git a/tests/components/twinkly/snapshots/test_diagnostics.ambr b/tests/components/twinkly/snapshots/test_diagnostics.ambr index 4d25e222501..abd923dcb83 100644 --- a/tests/components/twinkly/snapshots/test_diagnostics.ambr +++ b/tests/components/twinkly/snapshots/test_diagnostics.ambr @@ -32,14 +32,14 @@ }), 'domain': 'twinkly', 'entry_id': '4c8fccf5-e08a-4173-92d5-49bf479252a2', - 'minor_version': 1, + 'minor_version': 2, 'options': dict({ }), 'pref_disable_new_entities': False, 'pref_disable_polling': False, 'source': 'user', 'title': 'Twinkly', - 'unique_id': '4c8fccf5-e08a-4173-92d5-49bf479252a2', + 'unique_id': 'aa:bb:cc:dd:ee:ff', 'version': 1, }), 'sw_version': '2.8.10', diff --git a/tests/components/twinkly/test_init.py b/tests/components/twinkly/test_init.py index 6642807ac3f..60ebe65b445 100644 --- a/tests/components/twinkly/test_init.py +++ b/tests/components/twinkly/test_init.py @@ -1,14 +1,16 @@ -"""Tests of the initialization of the twinly integration.""" +"""Tests of 
the initialization of the twinkly integration.""" from unittest.mock import patch from uuid import uuid4 -from homeassistant.components.twinkly.const import DOMAIN as TWINKLY_DOMAIN +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from homeassistant.components.twinkly.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_HOST, CONF_ID, CONF_MODEL, CONF_NAME from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er -from . import TEST_HOST, TEST_MODEL, TEST_NAME_ORIGINAL, ClientMock +from . import TEST_HOST, TEST_MAC, TEST_MODEL, TEST_NAME_ORIGINAL, ClientMock from tests.common import MockConfigEntry @@ -19,7 +21,7 @@ async def test_load_unload_entry(hass: HomeAssistant) -> None: device_id = str(uuid4()) config_entry = MockConfigEntry( - domain=TWINKLY_DOMAIN, + domain=DOMAIN, data={ CONF_HOST: TEST_HOST, CONF_ID: device_id, @@ -27,6 +29,8 @@ async def test_load_unload_entry(hass: HomeAssistant) -> None: CONF_MODEL: TEST_MODEL, }, entry_id=device_id, + unique_id=TEST_MAC, + minor_version=2, ) config_entry.add_to_hass(hass) @@ -47,13 +51,15 @@ async def test_config_entry_not_ready(hass: HomeAssistant) -> None: client.is_offline = True config_entry = MockConfigEntry( - domain=TWINKLY_DOMAIN, + domain=DOMAIN, data={ CONF_HOST: TEST_HOST, CONF_ID: id, CONF_NAME: TEST_NAME_ORIGINAL, CONF_MODEL: TEST_MODEL, }, + minor_version=2, + unique_id=TEST_MAC, ) config_entry.add_to_hass(hass) @@ -62,3 +68,45 @@ async def test_config_entry_not_ready(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(config_entry.entry_id) assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_mac_migration( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, +) -> None: + """Validate that the unique_id is migrated to the MAC address.""" + client = ClientMock() + + 
config_entry = MockConfigEntry( + domain=DOMAIN, + minor_version=1, + unique_id="unique_id", + data={ + CONF_HOST: TEST_HOST, + CONF_ID: id, + CONF_NAME: TEST_NAME_ORIGINAL, + CONF_MODEL: TEST_MODEL, + }, + ) + config_entry.add_to_hass(hass) + entity_entry = entity_registry.async_get_or_create( + LIGHT_DOMAIN, + DOMAIN, + config_entry.unique_id, + ) + device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, config_entry.unique_id)}, + ) + + with patch("homeassistant.components.twinkly.Twinkly", return_value=client): + await hass.config_entries.async_setup(config_entry.entry_id) + + assert config_entry.state is ConfigEntryState.LOADED + + assert entity_registry.async_get(entity_entry.entity_id).unique_id == TEST_MAC + assert device_registry.async_get_device( + identifiers={(DOMAIN, config_entry.unique_id)} + ).identifiers == {(DOMAIN, TEST_MAC)} + assert config_entry.unique_id == TEST_MAC diff --git a/tests/components/twinkly/test_light.py b/tests/components/twinkly/test_light.py index 7a55dbec14a..26df83aebe0 100644 --- a/tests/components/twinkly/test_light.py +++ b/tests/components/twinkly/test_light.py @@ -15,7 +15,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.device_registry import DeviceEntry from homeassistant.helpers.entity_registry import RegistryEntry -from . import TEST_MODEL, TEST_NAME, TEST_NAME_ORIGINAL, ClientMock +from . 
import TEST_MAC, TEST_MODEL, TEST_NAME, TEST_NAME_ORIGINAL, ClientMock from tests.common import MockConfigEntry, async_fire_time_changed @@ -301,7 +301,7 @@ async def test_update_name( async_fire_time_changed(hass) await hass.async_block_till_done() - dev_entry = device_registry.async_get_device({(TWINKLY_DOMAIN, client.id)}) + dev_entry = device_registry.async_get_device({(TWINKLY_DOMAIN, TEST_MAC)}) assert dev_entry.name == "new_device_name" assert config_entry.data[CONF_NAME] == "new_device_name" @@ -310,10 +310,9 @@ async def test_update_name( async def test_unload(hass: HomeAssistant) -> None: """Validate that entities can be unloaded from the UI.""" - _, _, client, _ = await _create_entries(hass) - entry_id = client.id + _, _, _, entry = await _create_entries(hass) - assert await hass.config_entries.async_unload(entry_id) + assert await hass.config_entries.async_unload(entry.entry_id) async def _create_entries( @@ -330,18 +329,19 @@ async def _create_entries( CONF_NAME: TEST_NAME_ORIGINAL, CONF_MODEL: TEST_MODEL, }, - entry_id=client.id, + unique_id=TEST_MAC, + minor_version=2, ) config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(client.id) + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() device_registry = dr.async_get(hass) entity_registry = er.async_get(hass) - entity_id = entity_registry.async_get_entity_id("light", TWINKLY_DOMAIN, client.id) + entity_id = entity_registry.async_get_entity_id("light", TWINKLY_DOMAIN, TEST_MAC) entity_entry = entity_registry.async_get(entity_id) - device = device_registry.async_get_device(identifiers={(TWINKLY_DOMAIN, client.id)}) + device = device_registry.async_get_device(identifiers={(TWINKLY_DOMAIN, TEST_MAC)}) assert entity_entry is not None assert device is not None From 5abc03c21ebdeccea1678cd4721ae01fa09e3673 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sat, 21 Dec 2024 13:26:48 +0100 Subject: [PATCH 578/677] Fix spelling of 
"Gateway PIN" and remove two excessive spaces (#133716) --- homeassistant/components/overkiz/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/overkiz/strings.json b/homeassistant/components/overkiz/strings.json index a756df4d0d6..1595cd52aeb 100644 --- a/homeassistant/components/overkiz/strings.json +++ b/homeassistant/components/overkiz/strings.json @@ -22,7 +22,7 @@ } }, "local": { - "description": "By activating the [Developer Mode of your TaHoma box](https://github.com/Somfy-Developer/Somfy-TaHoma-Developer-Mode#getting-started), you can authorize third-party software (like Home Assistant) to connect to it via your local network. \n\n After activation, enter your application credentials and change the host to include your gateway-pin or enter the IP address of your gateway.", + "description": "By activating the [Developer Mode of your TaHoma box](https://github.com/Somfy-Developer/Somfy-TaHoma-Developer-Mode#getting-started), you can authorize third-party software (like Home Assistant) to connect to it via your local network.\n\nAfter activation, enter your application credentials and change the host to include your Gateway PIN or enter the IP address of your gateway.", "data": { "host": "[%key:common::config_flow::data::host%]", "username": "[%key:common::config_flow::data::username%]", From a3febc4449375868c8968350262b8dd84170e76b Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 14:23:16 +0100 Subject: [PATCH 579/677] Add select platform to Peblar Rocksolid EV Chargers integration (#133720) --- homeassistant/components/peblar/__init__.py | 9 +- .../components/peblar/coordinator.py | 38 +++++++- .../components/peblar/diagnostics.py | 1 + homeassistant/components/peblar/icons.json | 11 +++ homeassistant/components/peblar/select.py | 95 +++++++++++++++++++ homeassistant/components/peblar/strings.json | 12 +++ tests/components/peblar/conftest.py | 10 +- 
.../peblar/fixtures/user_configuration.json | 59 ++++++++++++ .../peblar/snapshots/test_diagnostics.ambr | 61 ++++++++++++ .../peblar/snapshots/test_select.ambr | 62 ++++++++++++ tests/components/peblar/test_select.py | 35 +++++++ 11 files changed, 389 insertions(+), 4 deletions(-) create mode 100644 homeassistant/components/peblar/select.py create mode 100644 tests/components/peblar/fixtures/user_configuration.json create mode 100644 tests/components/peblar/snapshots/test_select.ambr create mode 100644 tests/components/peblar/test_select.py diff --git a/homeassistant/components/peblar/__init__.py b/homeassistant/components/peblar/__init__.py index d1da6ce83b7..79ffd236f32 100644 --- a/homeassistant/components/peblar/__init__.py +++ b/homeassistant/components/peblar/__init__.py @@ -24,10 +24,12 @@ from .coordinator import ( PeblarConfigEntry, PeblarMeterDataUpdateCoordinator, PeblarRuntimeData, + PeblarUserConfigurationDataUpdateCoordinator, PeblarVersionDataUpdateCoordinator, ) PLATFORMS = [ + Platform.SELECT, Platform.SENSOR, Platform.UPDATE, ] @@ -56,16 +58,21 @@ async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bo # Setup the data coordinators meter_coordinator = PeblarMeterDataUpdateCoordinator(hass, entry, api) + user_configuration_coordinator = PeblarUserConfigurationDataUpdateCoordinator( + hass, entry, peblar + ) version_coordinator = PeblarVersionDataUpdateCoordinator(hass, entry, peblar) await asyncio.gather( meter_coordinator.async_config_entry_first_refresh(), + user_configuration_coordinator.async_config_entry_first_refresh(), version_coordinator.async_config_entry_first_refresh(), ) # Store the runtime data entry.runtime_data = PeblarRuntimeData( - system_information=system_information, meter_coordinator=meter_coordinator, + system_information=system_information, + user_configuraton_coordinator=user_configuration_coordinator, version_coordinator=version_coordinator, ) diff --git 
a/homeassistant/components/peblar/coordinator.py b/homeassistant/components/peblar/coordinator.py index f83ed8f4dda..a01e3d6b41a 100644 --- a/homeassistant/components/peblar/coordinator.py +++ b/homeassistant/components/peblar/coordinator.py @@ -5,7 +5,14 @@ from __future__ import annotations from dataclasses import dataclass from datetime import timedelta -from peblar import Peblar, PeblarApi, PeblarError, PeblarMeter, PeblarVersions +from peblar import ( + Peblar, + PeblarApi, + PeblarError, + PeblarMeter, + PeblarUserConfiguration, + PeblarVersions, +) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -19,8 +26,9 @@ from .const import LOGGER class PeblarRuntimeData: """Class to hold runtime data.""" - system_information: PeblarSystemInformation meter_coordinator: PeblarMeterDataUpdateCoordinator + system_information: PeblarSystemInformation + user_configuraton_coordinator: PeblarUserConfigurationDataUpdateCoordinator version_coordinator: PeblarVersionDataUpdateCoordinator @@ -86,3 +94,29 @@ class PeblarMeterDataUpdateCoordinator(DataUpdateCoordinator[PeblarMeter]): return await self.api.meter() except PeblarError as err: raise UpdateFailed(err) from err + + +class PeblarUserConfigurationDataUpdateCoordinator( + DataUpdateCoordinator[PeblarUserConfiguration] +): + """Class to manage fetching Peblar user configuration data.""" + + def __init__( + self, hass: HomeAssistant, entry: PeblarConfigEntry, peblar: Peblar + ) -> None: + """Initialize the coordinator.""" + self.peblar = peblar + super().__init__( + hass, + LOGGER, + config_entry=entry, + name=f"Peblar {entry.title} user configuration", + update_interval=timedelta(minutes=5), + ) + + async def _async_update_data(self) -> PeblarUserConfiguration: + """Fetch data from the Peblar device.""" + try: + return await self.peblar.user_configuration() + except PeblarError as err: + raise UpdateFailed(err) from err diff --git 
a/homeassistant/components/peblar/diagnostics.py b/homeassistant/components/peblar/diagnostics.py index 91cdb5dc811..6c4531c0e09 100644 --- a/homeassistant/components/peblar/diagnostics.py +++ b/homeassistant/components/peblar/diagnostics.py @@ -15,6 +15,7 @@ async def async_get_config_entry_diagnostics( """Return diagnostics for a config entry.""" return { "system_information": entry.runtime_data.system_information.to_dict(), + "user_configuration": entry.runtime_data.user_configuraton_coordinator.data.to_dict(), "meter": entry.runtime_data.meter_coordinator.data.to_dict(), "versions": { "available": entry.runtime_data.version_coordinator.data.available.to_dict(), diff --git a/homeassistant/components/peblar/icons.json b/homeassistant/components/peblar/icons.json index 073cd08a2c7..b052eb6de4d 100644 --- a/homeassistant/components/peblar/icons.json +++ b/homeassistant/components/peblar/icons.json @@ -1,5 +1,16 @@ { "entity": { + "select": { + "smart_charging": { + "default": "mdi:lightning-bolt", + "state": { + "fast_solar": "mdi:solar-power", + "pure_solar": "mdi:solar-power-variant", + "scheduled": "mdi:calendar-clock", + "smart_solar": "mdi:solar-power" + } + } + }, "update": { "customization": { "default": "mdi:palette" diff --git a/homeassistant/components/peblar/select.py b/homeassistant/components/peblar/select.py new file mode 100644 index 00000000000..95a87248804 --- /dev/null +++ b/homeassistant/components/peblar/select.py @@ -0,0 +1,95 @@ +"""Support for Peblar selects.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import Any + +from peblar import Peblar, PeblarUserConfiguration, SmartChargingMode + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from 
homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import PeblarConfigEntry, PeblarUserConfigurationDataUpdateCoordinator + + +@dataclass(frozen=True, kw_only=True) +class PeblarSelectEntityDescription(SelectEntityDescription): + """Class describing Peblar select entities.""" + + current_fn: Callable[[PeblarUserConfiguration], str | None] + select_fn: Callable[[Peblar, str], Awaitable[Any]] + + +DESCRIPTIONS = [ + PeblarSelectEntityDescription( + key="smart_charging", + translation_key="smart_charging", + entity_category=EntityCategory.CONFIG, + options=[ + "default", + "fast_solar", + "pure_solar", + "scheduled", + "smart_solar", + ], + current_fn=lambda x: x.smart_charging.value if x.smart_charging else None, + select_fn=lambda x, mode: x.smart_charging(SmartChargingMode(mode)), + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PeblarConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Peblar select based on a config entry.""" + async_add_entities( + PeblarSelectEntity( + entry=entry, + description=description, + ) + for description in DESCRIPTIONS + ) + + +class PeblarSelectEntity( + CoordinatorEntity[PeblarUserConfigurationDataUpdateCoordinator], SelectEntity +): + """Defines a peblar select entity.""" + + entity_description: PeblarSelectEntityDescription + + _attr_has_entity_name = True + + def __init__( + self, + entry: PeblarConfigEntry, + description: PeblarSelectEntityDescription, + ) -> None: + """Initialize the select entity.""" + super().__init__(entry.runtime_data.user_configuraton_coordinator) + self.entity_description = description + self._attr_unique_id = f"{entry.unique_id}-{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={ + (DOMAIN, entry.runtime_data.system_information.product_serial_number) + }, + ) + + @property + def current_option(self) 
-> str | None: + """Return the selected entity option to represent the entity state.""" + return self.entity_description.current_fn(self.coordinator.data) + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + await self.entity_description.select_fn(self.coordinator.peblar, option) + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index 2e23fcfcdcd..a36cd14fe48 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -33,6 +33,18 @@ } }, "entity": { + "select": { + "smart_charging": { + "name": "Smart charging", + "state": { + "default": "Default", + "fast_solar": "Fast solar", + "pure_solar": "Pure solar", + "scheduled": "Scheduled", + "smart_solar": "Smart solar" + } + } + }, "update": { "customization": { "name": "Customization" diff --git a/tests/components/peblar/conftest.py b/tests/components/peblar/conftest.py index 2db28d3a7e6..8831697f74e 100644 --- a/tests/components/peblar/conftest.py +++ b/tests/components/peblar/conftest.py @@ -6,7 +6,12 @@ from collections.abc import Generator from contextlib import nullcontext from unittest.mock import MagicMock, patch -from peblar import PeblarMeter, PeblarSystemInformation, PeblarVersions +from peblar import ( + PeblarMeter, + PeblarSystemInformation, + PeblarUserConfiguration, + PeblarVersions, +) import pytest from homeassistant.components.peblar.const import DOMAIN @@ -51,6 +56,9 @@ def mock_peblar() -> Generator[MagicMock]: peblar.current_versions.return_value = PeblarVersions.from_json( load_fixture("current_versions.json", DOMAIN) ) + peblar.user_configuration.return_value = PeblarUserConfiguration.from_json( + load_fixture("user_configuration.json", DOMAIN) + ) peblar.system_information.return_value = PeblarSystemInformation.from_json( load_fixture("system_information.json", DOMAIN) ) diff --git 
a/tests/components/peblar/fixtures/user_configuration.json b/tests/components/peblar/fixtures/user_configuration.json new file mode 100644 index 00000000000..b778ad35f18 --- /dev/null +++ b/tests/components/peblar/fixtures/user_configuration.json @@ -0,0 +1,59 @@ +{ + "BopFallbackCurrent": 6000, + "BopHomeWizardAddress": "p1meter-093586", + "BopSource": "homewizard", + "BopSourceParameters": "{}", + "ConnectedPhases": 1, + "CurrentCtrlBopCtType": "CTK05-14", + "CurrentCtrlBopEnable": true, + "CurrentCtrlBopFuseRating": 35, + "CurrentCtrlFixedChargeCurrentLimit": 16, + "GroundMonitoring": true, + "GroupLoadBalancingEnable": false, + "GroupLoadBalancingFallbackCurrent": 6, + "GroupLoadBalancingGroupId": 1, + "GroupLoadBalancingInterface": "RS485", + "GroupLoadBalancingMaxCurrent": 0, + "GroupLoadBalancingRole": "", + "HmiBuzzerVolume": 1, + "HmiLedIntensityManual": 0, + "HmiLedIntensityMax": 100, + "HmiLedIntensityMin": 1, + "HmiLedIntensityMode": "Fixed", + "LocalRestApiAccessMode": "ReadWrite", + "LocalRestApiAllowed": true, + "LocalRestApiEnable": true, + "LocalSmartChargingAllowed": true, + "ModbusServerAccessMode": "ReadOnly", + "ModbusServerAllowed": true, + "ModbusServerEnable": true, + "PhaseRotation": "RST", + "PowerLimitInputDi1Inverse": false, + "PowerLimitInputDi1Limit": 6, + "PowerLimitInputDi2Inverse": false, + "PowerLimitInputDi2Limit": 0, + "PowerLimitInputEnable": false, + "PredefinedCpoName": "", + "ScheduledChargingAllowed": true, + "ScheduledChargingEnable": false, + "SeccOcppActive": false, + "SeccOcppUri": "", + "SessionManagerChargeWithoutAuth": false, + "SolarChargingAllowed": true, + "SolarChargingEnable": true, + "SolarChargingMode": "PureSolar", + "SolarChargingSource": "homewizard", + "SolarChargingSourceParameters": "{\"address\":\"p1meter-093586\"}", + "TimeZone": "Europe/Amsterdam", + "UserDefinedChargeLimitCurrent": 16, + "UserDefinedChargeLimitCurrentAllowed": true, + "UserDefinedHouseholdPowerLimit": 20000, + 
"UserDefinedHouseholdPowerLimitAllowed": true, + "UserDefinedHouseholdPowerLimitEnable": false, + "UserDefinedHouseholdPowerLimitSource": "homewizard", + "UserDefinedHouseholdPowerLimitSourceParameters": "{\"address\":\"p1meter-093586\"}", + "UserKeepSocketLocked": false, + "VDEPhaseImbalanceEnable": false, + "VDEPhaseImbalanceLimit": 20, + "WebIfUpdateHelper": true +} diff --git a/tests/components/peblar/snapshots/test_diagnostics.ambr b/tests/components/peblar/snapshots/test_diagnostics.ambr index 7701c1eb159..fa6eb857e09 100644 --- a/tests/components/peblar/snapshots/test_diagnostics.ambr +++ b/tests/components/peblar/snapshots/test_diagnostics.ambr @@ -75,6 +75,67 @@ 'WlanApMacAddr': '00:0F:11:58:86:98', 'WlanStaMacAddr': '00:0F:11:58:86:99', }), + 'user_configuration': dict({ + 'BopFallbackCurrent': 6000, + 'BopHomeWizardAddress': 'p1meter-093586', + 'BopSource': 'homewizard', + 'BopSourceParameters': '{}', + 'ConnectedPhases': 1, + 'CurrentCtrlBopCtType': 'CTK05-14', + 'CurrentCtrlBopEnable': True, + 'CurrentCtrlBopFuseRating': 35, + 'CurrentCtrlFixedChargeCurrentLimit': 16, + 'GroundMonitoring': True, + 'GroupLoadBalancingEnable': False, + 'GroupLoadBalancingFallbackCurrent': 6, + 'GroupLoadBalancingGroupId': 1, + 'GroupLoadBalancingInterface': 'RS485', + 'GroupLoadBalancingMaxCurrent': 0, + 'GroupLoadBalancingRole': '', + 'HmiBuzzerVolume': 1, + 'HmiLedIntensityManual': 0, + 'HmiLedIntensityMax': 100, + 'HmiLedIntensityMin': 1, + 'HmiLedIntensityMode': 'Fixed', + 'LocalRestApiAccessMode': 'ReadWrite', + 'LocalRestApiAllowed': True, + 'LocalRestApiEnable': True, + 'LocalSmartChargingAllowed': True, + 'ModbusServerAccessMode': 'ReadOnly', + 'ModbusServerAllowed': True, + 'ModbusServerEnable': True, + 'PhaseRotation': 'RST', + 'PowerLimitInputDi1Inverse': False, + 'PowerLimitInputDi1Limit': 6, + 'PowerLimitInputDi2Inverse': False, + 'PowerLimitInputDi2Limit': 0, + 'PowerLimitInputEnable': False, + 'PredefinedCpoName': '', + 'ScheduledChargingAllowed': True, + 
'ScheduledChargingEnable': False, + 'SeccOcppActive': False, + 'SeccOcppUri': '', + 'SessionManagerChargeWithoutAuth': False, + 'SolarChargingAllowed': True, + 'SolarChargingEnable': True, + 'SolarChargingMode': 'PureSolar', + 'SolarChargingSource': 'homewizard', + 'SolarChargingSourceParameters': dict({ + 'address': 'p1meter-093586', + }), + 'TimeZone': 'Europe/Amsterdam', + 'UserDefinedChargeLimitCurrent': 16, + 'UserDefinedChargeLimitCurrentAllowed': True, + 'UserDefinedHouseholdPowerLimit': 20000, + 'UserDefinedHouseholdPowerLimitAllowed': True, + 'UserDefinedHouseholdPowerLimitEnable': False, + 'UserDefinedHouseholdPowerLimitSource': 'homewizard', + 'UserKeepSocketLocked': False, + 'VDEPhaseImbalanceEnable': False, + 'VDEPhaseImbalanceLimit': 20, + 'WebIfUpdateHelper': True, + 'smart_charging': 'pure_solar', + }), 'versions': dict({ 'available': dict({ 'Customization': 'Peblar-1.9', diff --git a/tests/components/peblar/snapshots/test_select.ambr b/tests/components/peblar/snapshots/test_select.ambr new file mode 100644 index 00000000000..9f0852d7cf4 --- /dev/null +++ b/tests/components/peblar/snapshots/test_select.ambr @@ -0,0 +1,62 @@ +# serializer version: 1 +# name: test_entities[select][select.peblar_ev_charger_smart_charging-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'default', + 'fast_solar', + 'pure_solar', + 'scheduled', + 'smart_solar', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.peblar_ev_charger_smart_charging', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Smart charging', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'smart_charging', + 'unique_id': '23-45-A4O-MOF-smart_charging', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[select][select.peblar_ev_charger_smart_charging-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Peblar EV Charger Smart charging', + 'options': list([ + 'default', + 'fast_solar', + 'pure_solar', + 'scheduled', + 'smart_solar', + ]), + }), + 'context': , + 'entity_id': 'select.peblar_ev_charger_smart_charging', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'pure_solar', + }) +# --- diff --git a/tests/components/peblar/test_select.py b/tests/components/peblar/test_select.py new file mode 100644 index 00000000000..e20d84da755 --- /dev/null +++ b/tests/components/peblar/test_select.py @@ -0,0 +1,35 @@ +"""Tests for the Peblar select platform.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize("init_integration", [Platform.SELECT], indirect=True) +@pytest.mark.usefixtures("init_integration") +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the select entities.""" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + # Ensure all entities are correctly assigned to the Peblar device + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, "23-45-A4O-MOF")} + ) + assert device_entry + entity_entries = er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + for entity_entry in entity_entries: + assert 
entity_entry.device_id == device_entry.id From b5a7a41ebe4af6545aa34ac34b33a3026b727cdc Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Sat, 21 Dec 2024 15:10:14 +0100 Subject: [PATCH 580/677] KNX: Option to select specific tunnel endpoint on TCP connections (#131996) --- homeassistant/components/knx/__init__.py | 3 + homeassistant/components/knx/config_flow.py | 143 ++++++++++++++-- homeassistant/components/knx/strings.json | 18 +- tests/components/knx/test_config_flow.py | 180 +++++++++++++++++++- 4 files changed, 320 insertions(+), 24 deletions(-) diff --git a/homeassistant/components/knx/__init__.py b/homeassistant/components/knx/__init__.py index ea654c358e7..edb9cc62008 100644 --- a/homeassistant/components/knx/__init__.py +++ b/homeassistant/components/knx/__init__.py @@ -401,6 +401,9 @@ class KNXModule: ) return ConnectionConfig( auto_reconnect=True, + individual_address=self.entry.data.get( + CONF_KNX_TUNNEL_ENDPOINT_IA, # may be configured at knxkey upload + ), secure_config=SecureConfig( knxkeys_password=self.entry.data.get(CONF_KNX_KNXKEY_PASSWORD), knxkeys_file_path=_knxkeys_file, diff --git a/homeassistant/components/knx/config_flow.py b/homeassistant/components/knx/config_flow.py index feeb7626577..eda160cd1a6 100644 --- a/homeassistant/components/knx/config_flow.py +++ b/homeassistant/components/knx/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from abc import ABC, abstractmethod from collections.abc import AsyncGenerator -from typing import Any, Final +from typing import Any, Final, Literal import voluptuous as vol from xknx import XKNX @@ -121,6 +121,15 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow): self._gatewayscanner: GatewayScanner | None = None self._async_scan_gen: AsyncGenerator[GatewayDescriptor] | None = None + @property + def _xknx(self) -> XKNX: + """Return XKNX instance.""" + if isinstance(self, OptionsFlow) and ( + knx_module := self.hass.data.get(KNX_MODULE_KEY) + ): + return knx_module.xknx + return 
XKNX() + @abstractmethod def finish_flow(self) -> ConfigFlowResult: """Finish the flow.""" @@ -183,14 +192,8 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow): CONF_KNX_ROUTING: CONF_KNX_ROUTING.capitalize(), } - if isinstance(self, OptionsFlow) and ( - knx_module := self.hass.data.get(KNX_MODULE_KEY) - ): - xknx = knx_module.xknx - else: - xknx = XKNX() self._gatewayscanner = GatewayScanner( - xknx, stop_on_found=0, timeout_in_seconds=2 + self._xknx, stop_on_found=0, timeout_in_seconds=2 ) # keep a reference to the generator to scan in background until user selects a connection type self._async_scan_gen = self._gatewayscanner.async_scan() @@ -204,8 +207,25 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow): CONF_KNX_AUTOMATIC: CONF_KNX_AUTOMATIC.capitalize() } | supported_connection_types + default_connection_type: Literal["automatic", "tunneling", "routing"] + _current_conn = self.initial_data.get(CONF_KNX_CONNECTION_TYPE) + if _current_conn in ( + CONF_KNX_TUNNELING, + CONF_KNX_TUNNELING_TCP, + CONF_KNX_TUNNELING_TCP_SECURE, + ): + default_connection_type = CONF_KNX_TUNNELING + elif _current_conn in (CONF_KNX_ROUTING, CONF_KNX_ROUTING_SECURE): + default_connection_type = CONF_KNX_ROUTING + elif CONF_KNX_AUTOMATIC in supported_connection_types: + default_connection_type = CONF_KNX_AUTOMATIC + else: + default_connection_type = CONF_KNX_TUNNELING + fields = { - vol.Required(CONF_KNX_CONNECTION_TYPE): vol.In(supported_connection_types) + vol.Required( + CONF_KNX_CONNECTION_TYPE, default=default_connection_type + ): vol.In(supported_connection_types) } return self.async_show_form( step_id="connection_type", data_schema=vol.Schema(fields) @@ -216,8 +236,7 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow): ) -> ConfigFlowResult: """Select a tunnel from a list. - Will be skipped if the gateway scan was unsuccessful - or if only one gateway was found. + Will be skipped if the gateway scan was unsuccessful. 
""" if user_input is not None: if user_input[CONF_KNX_GATEWAY] == OPTION_MANUAL_TUNNEL: @@ -247,6 +266,8 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow): user_password=None, tunnel_endpoint_ia=None, ) + if connection_type == CONF_KNX_TUNNELING_TCP: + return await self.async_step_tcp_tunnel_endpoint() if connection_type == CONF_KNX_TUNNELING_TCP_SECURE: return await self.async_step_secure_key_source_menu_tunnel() self.new_title = f"Tunneling @ {self._selected_tunnel}" @@ -255,16 +276,99 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow): if not self._found_tunnels: return await self.async_step_manual_tunnel() - errors: dict = {} - tunnel_options = { - str(tunnel): f"{tunnel}{' 🔐' if tunnel.tunnelling_requires_secure else ''}" + tunnel_options = [ + selector.SelectOptionDict( + value=str(tunnel), + label=( + f"{tunnel}" + f"{' TCP' if tunnel.supports_tunnelling_tcp else ' UDP'}" + f"{' 🔐 Secure tunneling' if tunnel.tunnelling_requires_secure else ''}" + ), + ) for tunnel in self._found_tunnels + ] + tunnel_options.append( + selector.SelectOptionDict( + value=OPTION_MANUAL_TUNNEL, label=OPTION_MANUAL_TUNNEL + ) + ) + default_tunnel = next( + ( + str(tunnel) + for tunnel in self._found_tunnels + if tunnel.ip_addr == self.initial_data.get(CONF_HOST) + ), + vol.UNDEFINED, + ) + fields = { + vol.Required( + CONF_KNX_GATEWAY, default=default_tunnel + ): selector.SelectSelector( + selector.SelectSelectorConfig( + options=tunnel_options, + mode=selector.SelectSelectorMode.LIST, + ) + ) } - tunnel_options |= {OPTION_MANUAL_TUNNEL: OPTION_MANUAL_TUNNEL} - fields = {vol.Required(CONF_KNX_GATEWAY): vol.In(tunnel_options)} + return self.async_show_form(step_id="tunnel", data_schema=vol.Schema(fields)) + + async def async_step_tcp_tunnel_endpoint( + self, user_input: dict | None = None + ) -> ConfigFlowResult: + """Select specific tunnel endpoint for plain TCP connection.""" + if user_input is not None: + selected_tunnel_ia: str | None = ( + None + if 
user_input[CONF_KNX_TUNNEL_ENDPOINT_IA] == CONF_KNX_AUTOMATIC + else user_input[CONF_KNX_TUNNEL_ENDPOINT_IA] + ) + self.new_entry_data |= KNXConfigEntryData( + tunnel_endpoint_ia=selected_tunnel_ia, + ) + self.new_title = ( + f"{selected_tunnel_ia or 'Tunneling'} @ {self._selected_tunnel}" + ) + return self.finish_flow() + + # this step is only called from async_step_tunnel so self._selected_tunnel is always set + assert self._selected_tunnel + # skip if only one tunnel endpoint or no tunnelling slot infos + if len(self._selected_tunnel.tunnelling_slots) <= 1: + return self.finish_flow() + + tunnel_endpoint_options = [ + selector.SelectOptionDict( + value=CONF_KNX_AUTOMATIC, label=CONF_KNX_AUTOMATIC.capitalize() + ) + ] + _current_ia = self._xknx.current_address + tunnel_endpoint_options.extend( + selector.SelectOptionDict( + value=str(slot), + label=( + f"{slot} - {'current connection' if slot == _current_ia else 'occupied' if not slot_status.free else 'free'}" + ), + ) + for slot, slot_status in self._selected_tunnel.tunnelling_slots.items() + ) + default_endpoint = ( + self.initial_data.get(CONF_KNX_TUNNEL_ENDPOINT_IA) or CONF_KNX_AUTOMATIC + ) return self.async_show_form( - step_id="tunnel", data_schema=vol.Schema(fields), errors=errors + step_id="tcp_tunnel_endpoint", + data_schema=vol.Schema( + { + vol.Required( + CONF_KNX_TUNNEL_ENDPOINT_IA, default=default_endpoint + ): selector.SelectSelector( + selector.SelectSelectorConfig( + options=tunnel_endpoint_options, + mode=selector.SelectSelectorMode.LIST, + ) + ), + } + ), ) async def async_step_manual_tunnel( @@ -612,12 +716,15 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow): ) for endpoint in self._tunnel_endpoints ) + default_endpoint = ( + self.initial_data.get(CONF_KNX_TUNNEL_ENDPOINT_IA) or CONF_KNX_AUTOMATIC + ) return self.async_show_form( step_id="knxkeys_tunnel_select", data_schema=vol.Schema( { vol.Required( - CONF_KNX_TUNNEL_ENDPOINT_IA, default=CONF_KNX_AUTOMATIC + CONF_KNX_TUNNEL_ENDPOINT_IA, 
default=default_endpoint ): selector.SelectSelector( selector.SelectSelectorConfig( options=tunnel_endpoint_options, diff --git a/homeassistant/components/knx/strings.json b/homeassistant/components/knx/strings.json index d697fa79e78..cde697007aa 100644 --- a/homeassistant/components/knx/strings.json +++ b/homeassistant/components/knx/strings.json @@ -15,6 +15,13 @@ "gateway": "KNX Tunnel Connection" } }, + "tcp_tunnel_endpoint": { + "title": "[%key:component::knx::config::step::knxkeys_tunnel_select::title%]", + "description": "[%key:component::knx::config::step::knxkeys_tunnel_select::description%]", + "data": { + "tunnel_endpoint_ia": "[%key:component::knx::config::step::knxkeys_tunnel_select::data::user_id%]" + } + }, "manual_tunnel": { "title": "Tunnel settings", "description": "Please enter the connection information of your tunneling device.", @@ -61,9 +68,9 @@ }, "knxkeys_tunnel_select": { "title": "Tunnel endpoint", - "description": "Select the tunnel used for connection.", + "description": "Select the tunnel endpoint used for the connection.", "data": { - "user_id": "`Automatic` will use the first free tunnel endpoint." + "user_id": "'Automatic' selects a free tunnel endpoint for you when connecting. If you're unsure, this is the best option." 
} }, "secure_tunnel_manual": { @@ -159,6 +166,13 @@ "gateway": "[%key:component::knx::config::step::tunnel::data::gateway%]" } }, + "tcp_tunnel_endpoint": { + "title": "[%key:component::knx::config::step::knxkeys_tunnel_select::title%]", + "description": "[%key:component::knx::config::step::knxkeys_tunnel_select::description%]", + "data": { + "tunnel_endpoint_ia": "[%key:component::knx::config::step::knxkeys_tunnel_select::data::user_id%]" + } + }, "manual_tunnel": { "title": "[%key:component::knx::config::step::manual_tunnel::title%]", "description": "[%key:component::knx::config::step::manual_tunnel::description%]", diff --git a/tests/components/knx/test_config_flow.py b/tests/components/knx/test_config_flow.py index 2187721a518..8ed79f837bb 100644 --- a/tests/components/knx/test_config_flow.py +++ b/tests/components/knx/test_config_flow.py @@ -7,6 +7,7 @@ import pytest from xknx.exceptions.exception import CommunicationError, InvalidSecureConfiguration from xknx.io import DEFAULT_MCAST_GRP, DEFAULT_MCAST_PORT from xknx.io.gateway_scanner import GatewayDescriptor +from xknx.knxip.dib import TunnelingSlotStatus from xknx.secure.keyring import sync_load_keyring from xknx.telegram import IndividualAddress @@ -105,6 +106,7 @@ def _gateway_descriptor( port: int, supports_tunnelling_tcp: bool = False, requires_secure: bool = False, + slots: bool = True, ) -> GatewayDescriptor: """Get mock gw descriptor.""" descriptor = GatewayDescriptor( @@ -120,6 +122,12 @@ def _gateway_descriptor( ) descriptor.tunnelling_requires_secure = requires_secure descriptor.routing_requires_secure = requires_secure + if supports_tunnelling_tcp and slots: + descriptor.tunnelling_slots = { + IndividualAddress("1.0.240"): TunnelingSlotStatus(True, True, True), + IndividualAddress("1.0.241"): TunnelingSlotStatus(True, True, False), + IndividualAddress("1.0.242"): TunnelingSlotStatus(True, True, True), + } return descriptor @@ -791,12 +799,14 @@ async def 
test_tunneling_setup_for_multiple_found_gateways( hass: HomeAssistant, knx_setup ) -> None: """Test tunneling if multiple gateways are found.""" - gateway = _gateway_descriptor("192.168.0.1", 3675) - gateway2 = _gateway_descriptor("192.168.1.100", 3675) + gateway_udp = _gateway_descriptor("192.168.0.1", 3675) + gateway_tcp = _gateway_descriptor("192.168.1.100", 3675, True) with patch( "homeassistant.components.knx.config_flow.GatewayScanner" ) as gateway_scanner_mock: - gateway_scanner_mock.return_value = GatewayScannerMock([gateway, gateway2]) + gateway_scanner_mock.return_value = GatewayScannerMock( + [gateway_udp, gateway_tcp] + ) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -815,7 +825,7 @@ async def test_tunneling_setup_for_multiple_found_gateways( result = await hass.config_entries.flow.async_configure( tunnel_flow["flow_id"], - {CONF_KNX_GATEWAY: str(gateway)}, + {CONF_KNX_GATEWAY: str(gateway_udp)}, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { @@ -833,6 +843,110 @@ async def test_tunneling_setup_for_multiple_found_gateways( knx_setup.assert_called_once() +async def test_tunneling_setup_tcp_endpoint_select_skip( + hass: HomeAssistant, knx_setup +) -> None: + """Test tunneling TCP endpoint selection skipped if no slot info found.""" + gateway_udp = _gateway_descriptor("192.168.0.1", 3675) + gateway_tcp_no_slots = _gateway_descriptor("192.168.1.100", 3675, True, slots=False) + with patch( + "homeassistant.components.knx.config_flow.GatewayScanner" + ) as gateway_scanner_mock: + gateway_scanner_mock.return_value = GatewayScannerMock( + [gateway_udp, gateway_tcp_no_slots] + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + tunnel_flow = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + 
CONF_KNX_CONNECTION_TYPE: CONF_KNX_TUNNELING, + }, + ) + assert tunnel_flow["type"] is FlowResultType.FORM + assert tunnel_flow["step_id"] == "tunnel" + assert not tunnel_flow["errors"] + + result = await hass.config_entries.flow.async_configure( + tunnel_flow["flow_id"], + {CONF_KNX_GATEWAY: str(gateway_tcp_no_slots)}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + **DEFAULT_ENTRY_DATA, + CONF_KNX_CONNECTION_TYPE: CONF_KNX_TUNNELING_TCP, + CONF_HOST: "192.168.1.100", + CONF_PORT: 3675, + CONF_KNX_INDIVIDUAL_ADDRESS: "0.0.240", + CONF_KNX_ROUTE_BACK: False, + CONF_KNX_TUNNEL_ENDPOINT_IA: None, + CONF_KNX_SECURE_DEVICE_AUTHENTICATION: None, + CONF_KNX_SECURE_USER_ID: None, + CONF_KNX_SECURE_USER_PASSWORD: None, + } + knx_setup.assert_called_once() + + +async def test_tunneling_setup_tcp_endpoint_select( + hass: HomeAssistant, knx_setup +) -> None: + """Test tunneling TCP endpoint selection.""" + gateway_tcp = _gateway_descriptor("192.168.1.100", 3675, True) + with patch( + "homeassistant.components.knx.config_flow.GatewayScanner" + ) as gateway_scanner_mock: + gateway_scanner_mock.return_value = GatewayScannerMock([gateway_tcp]) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + tunnel_flow = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_KNX_CONNECTION_TYPE: CONF_KNX_TUNNELING, + }, + ) + assert tunnel_flow["type"] is FlowResultType.FORM + assert tunnel_flow["step_id"] == "tunnel" + assert not tunnel_flow["errors"] + + endpoint_flow = await hass.config_entries.flow.async_configure( + tunnel_flow["flow_id"], + {CONF_KNX_GATEWAY: str(gateway_tcp)}, + ) + + assert endpoint_flow["type"] is FlowResultType.FORM + assert endpoint_flow["step_id"] == "tcp_tunnel_endpoint" + assert not endpoint_flow["errors"] + + result = await 
hass.config_entries.flow.async_configure( + endpoint_flow["flow_id"], + {CONF_KNX_TUNNEL_ENDPOINT_IA: "1.0.242"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "1.0.242 @ 1.0.0 - Test @ 192.168.1.100:3675" + assert result["data"] == { + **DEFAULT_ENTRY_DATA, + CONF_KNX_CONNECTION_TYPE: CONF_KNX_TUNNELING_TCP, + CONF_HOST: "192.168.1.100", + CONF_PORT: 3675, + CONF_KNX_INDIVIDUAL_ADDRESS: "0.0.240", + CONF_KNX_ROUTE_BACK: False, + CONF_KNX_TUNNEL_ENDPOINT_IA: "1.0.242", + CONF_KNX_SECURE_DEVICE_AUTHENTICATION: None, + CONF_KNX_SECURE_USER_ID: None, + CONF_KNX_SECURE_USER_PASSWORD: None, + } + knx_setup.assert_called_once() + + @pytest.mark.parametrize( "gateway", [ @@ -1319,6 +1433,64 @@ async def test_options_flow_secure_manual_to_keyfile( knx_setup.assert_called_once() +async def test_options_flow_routing(hass: HomeAssistant, knx_setup) -> None: + """Test options flow changing routing settings.""" + mock_config_entry = MockConfigEntry( + title="KNX", + domain="knx", + data={ + **DEFAULT_ENTRY_DATA, + CONF_KNX_CONNECTION_TYPE: CONF_KNX_ROUTING, + }, + ) + gateway = _gateway_descriptor("192.168.0.1", 3676) + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + menu_step = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + + with patch( + "homeassistant.components.knx.config_flow.GatewayScanner" + ) as gateway_scanner_mock: + gateway_scanner_mock.return_value = GatewayScannerMock([gateway]) + result = await hass.config_entries.options.async_configure( + menu_step["flow_id"], + {"next_step_id": "connection_type"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "connection_type" + + result2 = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_KNX_CONNECTION_TYPE: CONF_KNX_ROUTING, + }, + ) + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == 
"routing" + assert result2["errors"] == {} + + result3 = await hass.config_entries.options.async_configure( + result2["flow_id"], + { + CONF_KNX_INDIVIDUAL_ADDRESS: "2.0.4", + }, + ) + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert mock_config_entry.data == { + **DEFAULT_ENTRY_DATA, + CONF_KNX_CONNECTION_TYPE: CONF_KNX_ROUTING, + CONF_KNX_MCAST_GRP: DEFAULT_MCAST_GRP, + CONF_KNX_MCAST_PORT: DEFAULT_MCAST_PORT, + CONF_KNX_LOCAL_IP: None, + CONF_KNX_INDIVIDUAL_ADDRESS: "2.0.4", + CONF_KNX_SECURE_DEVICE_AUTHENTICATION: None, + CONF_KNX_SECURE_USER_ID: None, + CONF_KNX_SECURE_USER_PASSWORD: None, + CONF_KNX_TUNNEL_ENDPOINT_IA: None, + } + knx_setup.assert_called_once() + + async def test_options_communication_settings( hass: HomeAssistant, knx_setup, mock_config_entry: MockConfigEntry ) -> None: From 7e2d382ff4a1290b77b0b705e1ec764589bfd29e Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 15:10:35 +0100 Subject: [PATCH 581/677] Update aiohasupervisor to 0.2.2b5 (#133722) --- homeassistant/components/hassio/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/hassio/manifest.json b/homeassistant/components/hassio/manifest.json index 7276b76afc0..c9ecf6657e8 100644 --- a/homeassistant/components/hassio/manifest.json +++ b/homeassistant/components/hassio/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/hassio", "iot_class": "local_polling", "quality_scale": "internal", - "requirements": ["aiohasupervisor==0.2.2b4"], + "requirements": ["aiohasupervisor==0.2.2b5"], "single_config_entry": true } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 9473871efdd..bfa479b9c13 100644 --- a/homeassistant/package_constraints.txt +++ 
b/homeassistant/package_constraints.txt @@ -3,7 +3,7 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 -aiohasupervisor==0.2.2b4 +aiohasupervisor==0.2.2b5 aiohttp-fast-zlib==0.2.0 aiohttp==3.11.11 aiohttp_cors==0.7.0 diff --git a/pyproject.toml b/pyproject.toml index 71eae73a859..369f6f40921 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ dependencies = [ # Integrations may depend on hassio integration without listing it to # change behavior based on presence of supervisor. Deprecated with #127228 # Lib can be removed with 2025.11 - "aiohasupervisor==0.2.2b4", + "aiohasupervisor==0.2.2b5", "aiohttp==3.11.11", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.2.0", diff --git a/requirements.txt b/requirements.txt index 78aa370c4ec..82405dc44ef 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # Home Assistant Core aiodns==3.2.0 -aiohasupervisor==0.2.2b4 +aiohasupervisor==0.2.2b5 aiohttp==3.11.11 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 4a05da9d61a..2a353d7f9c3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -261,7 +261,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.2b4 +aiohasupervisor==0.2.2b5 # homeassistant.components.homekit_controller aiohomekit==3.2.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 032165b6182..9503ac7d79a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -246,7 +246,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.hassio -aiohasupervisor==0.2.2b4 +aiohasupervisor==0.2.2b5 # homeassistant.components.homekit_controller aiohomekit==3.2.7 From 9e6c1d5b62929a2d502975d0ff2a1201a96d4a9d Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 15:18:08 +0100 Subject: [PATCH 582/677] Add power and energy related sensors to Peblar Rocksolid EV Chargers (#133729) --- 
homeassistant/components/peblar/sensor.py | 163 ++++- homeassistant/components/peblar/strings.json | 35 + tests/components/peblar/fixtures/meter.json | 12 +- .../peblar/fixtures/user_configuration.json | 2 +- .../peblar/snapshots/test_diagnostics.ambr | 14 +- .../peblar/snapshots/test_sensor.ambr | 603 +++++++++++++++++- tests/components/peblar/test_sensor.py | 2 +- 7 files changed, 803 insertions(+), 28 deletions(-) diff --git a/homeassistant/components/peblar/sensor.py b/homeassistant/components/peblar/sensor.py index d31d929fcab..bb9fe9d4937 100644 --- a/homeassistant/components/peblar/sensor.py +++ b/homeassistant/components/peblar/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from peblar import PeblarMeter +from peblar import PeblarMeter, PeblarUserConfiguration from homeassistant.components.sensor import ( SensorDeviceClass, @@ -13,7 +13,13 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.const import UnitOfEnergy +from homeassistant.const import ( + EntityCategory, + UnitOfElectricCurrent, + UnitOfElectricPotential, + UnitOfEnergy, + UnitOfPower, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -27,19 +33,166 @@ from .coordinator import PeblarConfigEntry, PeblarMeterDataUpdateCoordinator class PeblarSensorDescription(SensorEntityDescription): """Describe an Peblar sensor.""" + has_fn: Callable[[PeblarUserConfiguration], bool] = lambda _: True value_fn: Callable[[PeblarMeter], int | None] DESCRIPTIONS: tuple[PeblarSensorDescription, ...] 
= ( PeblarSensorDescription( - key="energy_total", + key="current", + device_class=SensorDeviceClass.CURRENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases == 1, + native_unit_of_measurement=UnitOfElectricCurrent.MILLIAMPERE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda x: x.current_phase_1, + ), + PeblarSensorDescription( + key="current_phase_1", + translation_key="current_phase_1", + device_class=SensorDeviceClass.CURRENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases >= 2, + native_unit_of_measurement=UnitOfElectricCurrent.MILLIAMPERE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda x: x.current_phase_1, + ), + PeblarSensorDescription( + key="current_phase_2", + translation_key="current_phase_2", + device_class=SensorDeviceClass.CURRENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases >= 2, + native_unit_of_measurement=UnitOfElectricCurrent.MILLIAMPERE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda x: x.current_phase_2, + ), + PeblarSensorDescription( + key="current_phase_3", + translation_key="current_phase_3", + device_class=SensorDeviceClass.CURRENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases == 3, + native_unit_of_measurement=UnitOfElectricCurrent.MILLIAMPERE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda x: 
x.current_phase_3, + ), + PeblarSensorDescription( + key="energy_session", + translation_key="energy_session", device_class=SensorDeviceClass.ENERGY, native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, state_class=SensorStateClass.TOTAL_INCREASING, suggested_display_precision=2, suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_fn=lambda x: x.energy_session, + ), + PeblarSensorDescription( + key="energy_total", + translation_key="energy_total", + device_class=SensorDeviceClass.ENERGY, + entity_category=EntityCategory.DIAGNOSTIC, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + state_class=SensorStateClass.TOTAL_INCREASING, + suggested_display_precision=2, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, value_fn=lambda x: x.energy_total, ), + PeblarSensorDescription( + key="power_total", + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.WATT, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda x: x.power_total, + ), + PeblarSensorDescription( + key="power_phase_1", + translation_key="power_phase_1", + device_class=SensorDeviceClass.POWER, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases >= 2, + native_unit_of_measurement=UnitOfPower.WATT, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda x: x.power_phase_1, + ), + PeblarSensorDescription( + key="power_phase_2", + translation_key="power_phase_2", + device_class=SensorDeviceClass.POWER, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases >= 2, + native_unit_of_measurement=UnitOfPower.WATT, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda x: x.power_phase_2, + ), + PeblarSensorDescription( + key="power_phase_3", + translation_key="power_phase_3", + device_class=SensorDeviceClass.POWER, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + 
has_fn=lambda x: x.connected_phases == 3, + native_unit_of_measurement=UnitOfPower.WATT, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda x: x.power_phase_3, + ), + PeblarSensorDescription( + key="voltage", + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases == 1, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda x: x.voltage_phase_1, + ), + PeblarSensorDescription( + key="voltage_phase_1", + translation_key="voltage_phase_1", + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases >= 2, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda x: x.voltage_phase_1, + ), + PeblarSensorDescription( + key="voltage_phase_2", + translation_key="voltage_phase_2", + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases >= 2, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda x: x.voltage_phase_2, + ), + PeblarSensorDescription( + key="voltage_phase_3", + translation_key="voltage_phase_3", + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + has_fn=lambda x: x.connected_phases == 3, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda x: x.voltage_phase_3, + ), ) @@ -50,7 +203,9 @@ async def async_setup_entry( ) -> None: """Set up Peblar sensors based on a config entry.""" async_add_entities( - PeblarSensorEntity(entry, description) for description in DESCRIPTIONS + 
PeblarSensorEntity(entry, description) + for description in DESCRIPTIONS + if description.has_fn(entry.runtime_data.user_configuraton_coordinator.data) ) diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index a36cd14fe48..02aee0eacc9 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -45,6 +45,41 @@ } } }, + "sensor": { + "current_phase_1": { + "name": "Current phase 1" + }, + "current_phase_2": { + "name": "Current phase 2" + }, + "current_phase_3": { + "name": "Current phase 3" + }, + "energy_session": { + "name": "Session energy" + }, + "energy_total": { + "name": "Lifetime energy" + }, + "power_phase_1": { + "name": "Power phase 1" + }, + "power_phase_2": { + "name": "Power phase 2" + }, + "power_phase_3": { + "name": "Power phase 3" + }, + "voltage_phase_1": { + "name": "Voltage phase 1" + }, + "voltage_phase_2": { + "name": "Voltage phase 2" + }, + "voltage_phase_3": { + "name": "Voltage phase 3" + } + }, "update": { "customization": { "name": "Customization" diff --git a/tests/components/peblar/fixtures/meter.json b/tests/components/peblar/fixtures/meter.json index 1f32a3fbebc..f426adf9b8a 100644 --- a/tests/components/peblar/fixtures/meter.json +++ b/tests/components/peblar/fixtures/meter.json @@ -1,14 +1,14 @@ { - "CurrentPhase1": 0, + "CurrentPhase1": 14242, "CurrentPhase2": 0, "CurrentPhase3": 0, - "EnergySession": 0, - "EnergyTotal": 880321, - "PowerPhase1": 0, + "EnergySession": 381, + "EnergyTotal": 880703, + "PowerPhase1": 3185, "PowerPhase2": 0, "PowerPhase3": 0, - "PowerTotal": 0, - "VoltagePhase1": 230, + "PowerTotal": 3185, + "VoltagePhase1": 223, "VoltagePhase2": null, "VoltagePhase3": null } diff --git a/tests/components/peblar/fixtures/user_configuration.json b/tests/components/peblar/fixtures/user_configuration.json index b778ad35f18..b41aecd00ef 100644 --- a/tests/components/peblar/fixtures/user_configuration.json +++ 
b/tests/components/peblar/fixtures/user_configuration.json @@ -3,7 +3,7 @@ "BopHomeWizardAddress": "p1meter-093586", "BopSource": "homewizard", "BopSourceParameters": "{}", - "ConnectedPhases": 1, + "ConnectedPhases": 3, "CurrentCtrlBopCtType": "CTK05-14", "CurrentCtrlBopEnable": true, "CurrentCtrlBopFuseRating": 35, diff --git a/tests/components/peblar/snapshots/test_diagnostics.ambr b/tests/components/peblar/snapshots/test_diagnostics.ambr index fa6eb857e09..08d4d3ac6c6 100644 --- a/tests/components/peblar/snapshots/test_diagnostics.ambr +++ b/tests/components/peblar/snapshots/test_diagnostics.ambr @@ -2,16 +2,16 @@ # name: test_diagnostics dict({ 'meter': dict({ - 'CurrentPhase1': 0, + 'CurrentPhase1': 14242, 'CurrentPhase2': 0, 'CurrentPhase3': 0, - 'EnergySession': 0, - 'EnergyTotal': 880321, - 'PowerPhase1': 0, + 'EnergySession': 381, + 'EnergyTotal': 880703, + 'PowerPhase1': 3185, 'PowerPhase2': 0, 'PowerPhase3': 0, - 'PowerTotal': 0, - 'VoltagePhase1': 230, + 'PowerTotal': 3185, + 'VoltagePhase1': 223, }), 'system_information': dict({ 'BopCalIGainA': 264625, @@ -80,7 +80,7 @@ 'BopHomeWizardAddress': 'p1meter-093586', 'BopSource': 'homewizard', 'BopSourceParameters': '{}', - 'ConnectedPhases': 1, + 'ConnectedPhases': 3, 'CurrentCtrlBopCtType': 'CTK05-14', 'CurrentCtrlBopEnable': True, 'CurrentCtrlBopFuseRating': 35, diff --git a/tests/components/peblar/snapshots/test_sensor.ambr b/tests/components/peblar/snapshots/test_sensor.ambr index 29a5d7f7dd1..c3020b60078 100644 --- a/tests/components/peblar/snapshots/test_sensor.ambr +++ b/tests/components/peblar/snapshots/test_sensor.ambr @@ -1,5 +1,176 @@ # serializer version: 1 -# name: test_entities[sensor][sensor.peblar_ev_charger_energy-entry] +# name: test_entities[sensor][sensor.peblar_ev_charger_current_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_current_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 1', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_phase_1', + 'unique_id': '23-45-A4O-MOF_current_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_current_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Peblar EV Charger Current phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_current_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14.242', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_current_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_current_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 2', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'current_phase_2', + 'unique_id': '23-45-A4O-MOF_current_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_current_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Peblar EV Charger Current phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_current_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_current_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_current_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current phase 3', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_phase_3', + 'unique_id': '23-45-A4O-MOF_current_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_current_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Peblar EV Charger Current phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_current_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: 
test_entities[sensor][sensor.peblar_ev_charger_lifetime_energy-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -12,8 +183,8 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.peblar_ev_charger_energy', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_lifetime_energy', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -31,28 +202,442 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Energy', + 'original_name': 'Lifetime energy', 'platform': 'peblar', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, + 'translation_key': 'energy_total', 'unique_id': '23-45-A4O-MOF_energy_total', 'unit_of_measurement': , }) # --- -# name: test_entities[sensor][sensor.peblar_ev_charger_energy-state] +# name: test_entities[sensor][sensor.peblar_ev_charger_lifetime_energy-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', - 'friendly_name': 'Peblar EV Charger Energy', + 'friendly_name': 'Peblar EV Charger Lifetime energy', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.peblar_ev_charger_energy', + 'entity_id': 'sensor.peblar_ev_charger_lifetime_energy', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '880.321', + 'state': '880.703', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.peblar_ev_charger_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'peblar', 
+ 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '23-45-A4O-MOF_power_total', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Peblar EV Charger Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3185', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_power_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_power_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power phase 1', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_phase_1', + 'unique_id': '23-45-A4O-MOF_power_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_power_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Peblar EV Charger Power phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_power_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3185', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_power_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_power_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power phase 2', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_phase_2', + 'unique_id': '23-45-A4O-MOF_power_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_power_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Peblar EV Charger Power phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_power_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_power_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_power_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power phase 3', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_phase_3', + 'unique_id': '23-45-A4O-MOF_power_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_power_phase_3-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Peblar EV Charger Power phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_power_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_session_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.peblar_ev_charger_session_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Session energy', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_session', + 'unique_id': '23-45-A4O-MOF_energy_session', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_session_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Peblar EV Charger Session energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_session_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.381', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_voltage_phase_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_voltage_phase_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 1', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_phase_1', + 'unique_id': '23-45-A4O-MOF_voltage_phase_1', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_voltage_phase_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Peblar EV Charger Voltage phase 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_voltage_phase_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '223', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_voltage_phase_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_voltage_phase_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 2', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_phase_2', + 'unique_id': '23-45-A4O-MOF_voltage_phase_2', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_voltage_phase_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 
'friendly_name': 'Peblar EV Charger Voltage phase 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_voltage_phase_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_voltage_phase_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_voltage_phase_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage phase 3', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_phase_3', + 'unique_id': '23-45-A4O-MOF_voltage_phase_3', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_voltage_phase_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Peblar EV Charger Voltage phase 3', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_voltage_phase_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', }) # --- diff --git a/tests/components/peblar/test_sensor.py b/tests/components/peblar/test_sensor.py index e2a49942cd5..97402206d33 100644 --- a/tests/components/peblar/test_sensor.py +++ b/tests/components/peblar/test_sensor.py @@ -12,7 +12,7 @@ from tests.common import MockConfigEntry, snapshot_platform @pytest.mark.parametrize("init_integration", [Platform.SENSOR], indirect=True) -@pytest.mark.usefixtures("init_integration") 
+@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration") async def test_entities( hass: HomeAssistant, snapshot: SnapshotAssertion, From 9292bfc6eda19ceff288d7381334d75a6f22d680 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Fern=C3=A1ndez=20Rojas?= Date: Sat, 21 Dec 2024 15:19:55 +0100 Subject: [PATCH 583/677] Update AEMET-OpenData to v0.6.4 (#133723) --- homeassistant/components/aemet/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/aemet/manifest.json b/homeassistant/components/aemet/manifest.json index 5c9d1ff7e5a..24ca0099091 100644 --- a/homeassistant/components/aemet/manifest.json +++ b/homeassistant/components/aemet/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/aemet", "iot_class": "cloud_polling", "loggers": ["aemet_opendata"], - "requirements": ["AEMET-OpenData==0.6.3"] + "requirements": ["AEMET-OpenData==0.6.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index 2a353d7f9c3..e9376072875 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -4,7 +4,7 @@ -r requirements.txt # homeassistant.components.aemet -AEMET-OpenData==0.6.3 +AEMET-OpenData==0.6.4 # homeassistant.components.honeywell AIOSomecomfort==0.0.28 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 9503ac7d79a..c0761fafc8b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -4,7 +4,7 @@ -r requirements_test.txt # homeassistant.components.aemet -AEMET-OpenData==0.6.3 +AEMET-OpenData==0.6.4 # homeassistant.components.honeywell AIOSomecomfort==0.0.28 From ef31413a5986de598b1e29d73e157ea928d84e53 Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Sat, 21 Dec 2024 15:20:10 +0100 Subject: [PATCH 584/677] Add missing asserts to enphase_envoy config flow test (#133730) --- 
homeassistant/components/enphase_envoy/quality_scale.yaml | 3 --- tests/components/enphase_envoy/test_config_flow.py | 8 ++++++++ 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/enphase_envoy/quality_scale.yaml b/homeassistant/components/enphase_envoy/quality_scale.yaml index 8e096538f01..d64a62d4a48 100644 --- a/homeassistant/components/enphase_envoy/quality_scale.yaml +++ b/homeassistant/components/enphase_envoy/quality_scale.yaml @@ -11,13 +11,10 @@ rules: config-flow-test-coverage: status: todo comment: | - - test_form is missing an assertion for the unique id of the resulting entry - - Let's also have test_user_no_serial_number assert the unique_id (as in, it can't be set to the serial_number since we dont have one, so let's assert what it will result in) - Let's have every test result in either CREATE_ENTRY or ABORT (like test_form_invalid_auth or test_form_cannot_connect, they can be parametrized) - test_zeroconf_token_firmware and test_zeroconf_pre_token_firmware can also be parametrized I think - test_zero_conf_malformed_serial_property - with pytest.raises(KeyError) as ex:: I don't believe this should be able to raise a KeyError Shouldn't we abort the flow? 
- test_reauth -> Let's also assert result before we start with the async_configure part config-flow: status: todo comment: | diff --git a/tests/components/enphase_envoy/test_config_flow.py b/tests/components/enphase_envoy/test_config_flow.py index 44e2e680d5f..b0b139053ce 100644 --- a/tests/components/enphase_envoy/test_config_flow.py +++ b/tests/components/enphase_envoy/test_config_flow.py @@ -49,6 +49,7 @@ async def test_form( ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Envoy 1234" + assert result["result"].unique_id == "1234" assert result["data"] == { CONF_HOST: "1.1.1.1", CONF_NAME: "Envoy 1234", @@ -80,6 +81,7 @@ async def test_user_no_serial_number( ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Envoy" + assert result["result"].unique_id is None assert result["data"] == { CONF_HOST: "1.1.1.1", CONF_NAME: "Envoy", @@ -100,6 +102,8 @@ async def test_form_invalid_auth( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) + assert result["type"] is FlowResultType.FORM + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -131,6 +135,8 @@ async def test_form_cannot_connect( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) + assert result["type"] is FlowResultType.FORM + result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -634,6 +640,8 @@ async def test_reauth( """Test we reauth auth.""" await setup_integration(hass, config_entry) result = await config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { From cc134c820bcfd28ba23fcb2a1bdf2bd5e5b4d50a Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Sat, 21 Dec 2024 15:49:24 +0100 Subject: [PATCH 585/677] Reuse title of deleted enphase_envoy config 
entry if present (#133611) --- .../components/enphase_envoy/config_flow.py | 8 +- .../enphase_envoy/quality_scale.yaml | 2 - .../enphase_envoy/test_config_flow.py | 82 +++++++++++++++++++ 3 files changed, 88 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/enphase_envoy/config_flow.py b/homeassistant/components/enphase_envoy/config_flow.py index 70ba3570e91..1a2186d305e 100644 --- a/homeassistant/components/enphase_envoy/config_flow.py +++ b/homeassistant/components/enphase_envoy/config_flow.py @@ -141,9 +141,13 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN): and entry.data[CONF_HOST] == self.ip_address ): _LOGGER.debug( - "Zeroconf update envoy with this ip and blank serial in unique_id", + "Zeroconf update envoy with this ip and blank unique_id", ) - title = f"{ENVOY} {serial}" if entry.title == ENVOY else ENVOY + # Found an entry with blank unique_id (prior deleted) with same ip + # If the title is still default shorthand 'Envoy' then append serial + # to differentiate multiple Envoy. Don't change the title if any other + # title is still present in the old entry. + title = f"{ENVOY} {serial}" if entry.title == ENVOY else entry.title return self.async_update_reload_and_abort( entry, title=title, unique_id=serial, reason="already_configured" ) diff --git a/homeassistant/components/enphase_envoy/quality_scale.yaml b/homeassistant/components/enphase_envoy/quality_scale.yaml index d64a62d4a48..2b9350ed944 100644 --- a/homeassistant/components/enphase_envoy/quality_scale.yaml +++ b/homeassistant/components/enphase_envoy/quality_scale.yaml @@ -18,8 +18,6 @@ rules: config-flow: status: todo comment: | - - async_step_zeroconf -> a config entry title is considered userland, - so if someone renamed their entry, it will be reverted back with the code at L146. 
- async_step_reaut L160: I believe that the unique is already set when starting a reauth flow - The config flow is missing data descriptions for the other fields dependency-transparency: done diff --git a/tests/components/enphase_envoy/test_config_flow.py b/tests/components/enphase_envoy/test_config_flow.py index b0b139053ce..c20e73d774b 100644 --- a/tests/components/enphase_envoy/test_config_flow.py +++ b/tests/components/enphase_envoy/test_config_flow.py @@ -631,6 +631,88 @@ async def test_zero_conf_old_blank_entry( assert entry.title == "Envoy 1234" +async def test_zero_conf_old_blank_entry_standard_title( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, +) -> None: + """Test re-using old blank entry was Envoy as title.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "", + CONF_PASSWORD: "", + CONF_NAME: "unknown", + }, + unique_id=None, + title="Envoy", + ) + entry.add_to_hass(hass) + # test if shorthand title Envoy gets serial appended + hass.config_entries.async_update_entry(entry, title="Envoy") + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("1.1.1.1"), + ip_addresses=[ip_address("1.1.1.1"), ip_address("1.1.1.2")], + hostname="mock_hostname", + name="mock_name", + port=None, + properties={"serialnum": "1234", "protovers": "7.1.2"}, + type="mock_type", + ), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert entry.data[CONF_HOST] == "1.1.1.1" + assert entry.unique_id == "1234" + assert entry.title == "Envoy 1234" + + +async def test_zero_conf_old_blank_entry_user_title( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, +) -> None: + """Test re-using old blank entry with user title.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: "1.1.1.1", + CONF_USERNAME: 
"", + CONF_PASSWORD: "", + CONF_NAME: "unknown", + }, + unique_id=None, + title="Envoy", + ) + entry.add_to_hass(hass) + # set user title on entry + hass.config_entries.async_update_entry(entry, title="Envoy Backyard") + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("1.1.1.1"), + ip_addresses=[ip_address("1.1.1.1"), ip_address("1.1.1.2")], + hostname="mock_hostname", + name="mock_name", + port=None, + properties={"serialnum": "1234", "protovers": "7.1.2"}, + type="mock_type", + ), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert entry.data[CONF_HOST] == "1.1.1.1" + assert entry.unique_id == "1234" + assert entry.title == "Envoy Backyard" + + async def test_reauth( hass: HomeAssistant, config_entry: MockConfigEntry, From 3dad5f68961012ee8ee1bc69c2697e615d9e2a1b Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sat, 21 Dec 2024 15:54:02 +0100 Subject: [PATCH 586/677] Replace two outdated occurrences of "service" with "action" (#133728) --- homeassistant/components/knx/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/knx/strings.json b/homeassistant/components/knx/strings.json index cde697007aa..6c717c932b8 100644 --- a/homeassistant/components/knx/strings.json +++ b/homeassistant/components/knx/strings.json @@ -385,7 +385,7 @@ }, "event_register": { "name": "Register knx_event", - "description": "Adds or removes group addresses to knx_event filter for triggering `knx_event`s. Only addresses added with this service can be removed.", + "description": "Adds or removes group addresses to knx_event filter for triggering `knx_event`s. 
Only addresses added with this action can be removed.", "fields": { "address": { "name": "[%key:component::knx::services::send::fields::address::name%]", @@ -403,7 +403,7 @@ }, "exposure_register": { "name": "Expose to KNX bus", - "description": "Adds or removes exposures to KNX bus. Only exposures added with this service can be removed.", + "description": "Adds or removes exposures to KNX bus. Only exposures added with this action can be removed.", "fields": { "address": { "name": "[%key:component::knx::services::send::fields::address::name%]", From ac2090d2f5e753bba5849c9037ab54e7442b9983 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sat, 21 Dec 2024 16:16:12 +0100 Subject: [PATCH 587/677] Replace "service" with "action" in Z-Wave action descriptions (#133727) Replace all occurrence of "service" with "action" Clean up the remaining occurrences of "service" with "action" to be consistent with the new terminology in Home Assistant. --- homeassistant/components/zwave_js/strings.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/zwave_js/strings.json b/homeassistant/components/zwave_js/strings.json index 28789bbf9f4..0c3ca6313d4 100644 --- a/homeassistant/components/zwave_js/strings.json +++ b/homeassistant/components/zwave_js/strings.json @@ -306,7 +306,7 @@ "description": "Calls a Command Class API on a node. Some Command Classes can't be fully controlled via the `set_value` action and require direct calls to the Command Class API.", "fields": { "area_id": { - "description": "The area(s) to target for this service. If an area is specified, all zwave_js devices and entities in that area will be targeted for this service.", + "description": "The area(s) to target for this action. 
If an area is specified, all zwave_js devices and entities in that area will be targeted for this action.", "name": "Area ID(s)" }, "command_class": { @@ -314,7 +314,7 @@ "name": "[%key:component::zwave_js::services::set_value::fields::command_class::name%]" }, "device_id": { - "description": "The device(s) to target for this service.", + "description": "The device(s) to target for this action.", "name": "Device ID(s)" }, "endpoint": { @@ -322,7 +322,7 @@ "name": "[%key:component::zwave_js::services::set_config_parameter::fields::endpoint::name%]" }, "entity_id": { - "description": "The entity ID(s) to target for this service.", + "description": "The entity ID(s) to target for this action.", "name": "Entity ID(s)" }, "method_name": { @@ -556,7 +556,7 @@ "description": "Changes any value that Z-Wave JS recognizes on a Z-Wave device. This action has minimal validation so only use this action if you know what you are doing.", "fields": { "area_id": { - "description": "The area(s) to target for this service. If an area is specified, all zwave_js devices and entities in that area will be targeted for this service.", + "description": "The area(s) to target for this action. 
If an area is specified, all zwave_js devices and entities in that area will be targeted for this action.", "name": "Area ID(s)" }, "command_class": { @@ -564,7 +564,7 @@ "name": "Command class" }, "device_id": { - "description": "The device(s) to target for this service.", + "description": "The device(s) to target for this action.", "name": "Device ID(s)" }, "endpoint": { @@ -572,7 +572,7 @@ "name": "[%key:component::zwave_js::services::set_config_parameter::fields::endpoint::name%]" }, "entity_id": { - "description": "The entity ID(s) to target for this service.", + "description": "The entity ID(s) to target for this action.", "name": "Entity ID(s)" }, "options": { From 4b6febc7579e113f8c22dbf64ff09193fde8445e Mon Sep 17 00:00:00 2001 From: Tom Date: Sat, 21 Dec 2024 16:44:14 +0100 Subject: [PATCH 588/677] Add reconfiguration flow to Plugwise (#132878) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Abílio Costa Co-authored-by: Joost Lekkerkerker --- .../components/plugwise/config_flow.py | 102 ++++++++++++++---- .../components/plugwise/quality_scale.yaml | 4 +- .../components/plugwise/strings.json | 17 ++- tests/components/plugwise/conftest.py | 12 ++- tests/components/plugwise/test_config_flow.py | 101 +++++++++++++++-- 5 files changed, 202 insertions(+), 34 deletions(-) diff --git a/homeassistant/components/plugwise/config_flow.py b/homeassistant/components/plugwise/config_flow.py index 57abb1ccb86..6114dd39a6d 100644 --- a/homeassistant/components/plugwise/config_flow.py +++ b/homeassistant/components/plugwise/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +import logging from typing import Any, Self from plugwise import Smile @@ -41,8 +42,16 @@ from .const import ( ZEROCONF_MAP, ) +_LOGGER = logging.getLogger(__name__) -def base_schema(discovery_info: ZeroconfServiceInfo | None) -> vol.Schema: +SMILE_RECONF_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + } +) + + +def 
smile_user_schema(discovery_info: ZeroconfServiceInfo | None) -> vol.Schema: """Generate base schema for gateways.""" schema = vol.Schema({vol.Required(CONF_PASSWORD): str}) @@ -50,6 +59,7 @@ def base_schema(discovery_info: ZeroconfServiceInfo | None) -> vol.Schema: schema = schema.extend( { vol.Required(CONF_HOST): str, + # Port under investigation for removal (hence not added in #132878) vol.Optional(CONF_PORT, default=DEFAULT_PORT): int, vol.Required(CONF_USERNAME, default=SMILE): vol.In( {SMILE: FLOW_SMILE, STRETCH: FLOW_STRETCH} @@ -63,7 +73,7 @@ def base_schema(discovery_info: ZeroconfServiceInfo | None) -> vol.Schema: async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> Smile: """Validate whether the user input allows us to connect to the gateway. - Data has the keys from base_schema() with values provided by the user. + Data has the keys from the schema with values provided by the user. """ websession = async_get_clientsession(hass, verify_ssl=False) api = Smile( @@ -77,6 +87,32 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> Smile: return api +async def verify_connection( + hass: HomeAssistant, user_input: dict[str, Any] +) -> tuple[Smile | None, dict[str, str]]: + """Verify and return the gateway connection or an error.""" + errors: dict[str, str] = {} + + try: + return (await validate_input(hass, user_input), errors) + except ConnectionFailedError: + errors[CONF_BASE] = "cannot_connect" + except InvalidAuthentication: + errors[CONF_BASE] = "invalid_auth" + except InvalidSetupError: + errors[CONF_BASE] = "invalid_setup" + except (InvalidXMLError, ResponseError): + errors[CONF_BASE] = "response_error" + except UnsupportedDeviceError: + errors[CONF_BASE] = "unsupported" + except Exception: # noqa: BLE001 + _LOGGER.exception( + "Unknown exception while verifying connection with your Plugwise Smile" + ) + errors[CONF_BASE] = "unknown" + return (None, errors) + + class PlugwiseConfigFlow(ConfigFlow, domain=DOMAIN): 
"""Handle a config flow for Plugwise Smile.""" @@ -166,30 +202,56 @@ class PlugwiseConfigFlow(ConfigFlow, domain=DOMAIN): user_input[CONF_PORT] = self.discovery_info.port user_input[CONF_USERNAME] = self._username - try: - api = await validate_input(self.hass, user_input) - except ConnectionFailedError: - errors[CONF_BASE] = "cannot_connect" - except InvalidAuthentication: - errors[CONF_BASE] = "invalid_auth" - except InvalidSetupError: - errors[CONF_BASE] = "invalid_setup" - except (InvalidXMLError, ResponseError): - errors[CONF_BASE] = "response_error" - except UnsupportedDeviceError: - errors[CONF_BASE] = "unsupported" - except Exception: # noqa: BLE001 - errors[CONF_BASE] = "unknown" - else: + api, errors = await verify_connection(self.hass, user_input) + if api: await self.async_set_unique_id( - api.smile_hostname or api.gateway_id, raise_on_progress=False + api.smile_hostname or api.gateway_id, + raise_on_progress=False, ) self._abort_if_unique_id_configured() - return self.async_create_entry(title=api.smile_name, data=user_input) return self.async_show_form( step_id=SOURCE_USER, - data_schema=base_schema(self.discovery_info), + data_schema=smile_user_schema(self.discovery_info), + errors=errors, + ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the integration.""" + errors: dict[str, str] = {} + + reconfigure_entry = self._get_reconfigure_entry() + + if user_input: + # Keep current username and password + full_input = { + CONF_HOST: user_input.get(CONF_HOST), + CONF_PORT: reconfigure_entry.data.get(CONF_PORT), + CONF_USERNAME: reconfigure_entry.data.get(CONF_USERNAME), + CONF_PASSWORD: reconfigure_entry.data.get(CONF_PASSWORD), + } + + api, errors = await verify_connection(self.hass, full_input) + if api: + await self.async_set_unique_id( + api.smile_hostname or api.gateway_id, + raise_on_progress=False, + ) + 
self._abort_if_unique_id_mismatch(reason="not_the_same_smile") + return self.async_update_reload_and_abort( + reconfigure_entry, + data_updates=full_input, + ) + + return self.async_show_form( + step_id="reconfigure", + data_schema=self.add_suggested_values_to_schema( + data_schema=SMILE_RECONF_SCHEMA, + suggested_values=reconfigure_entry.data, + ), + description_placeholders={"title": reconfigure_entry.title}, errors=errors, ) diff --git a/homeassistant/components/plugwise/quality_scale.yaml b/homeassistant/components/plugwise/quality_scale.yaml index ce0788c44f7..a7b955b4713 100644 --- a/homeassistant/components/plugwise/quality_scale.yaml +++ b/homeassistant/components/plugwise/quality_scale.yaml @@ -52,9 +52,7 @@ rules: diagnostics: done exception-translations: done icon-translations: done - reconfiguration-flow: - status: todo - comment: This integration does not have any reconfiguration steps (yet) investigate how/why + reconfiguration-flow: done dynamic-devices: done discovery-update-info: done repair-issues: diff --git a/homeassistant/components/plugwise/strings.json b/homeassistant/components/plugwise/strings.json index 87a8e120591..d16b38df992 100644 --- a/homeassistant/components/plugwise/strings.json +++ b/homeassistant/components/plugwise/strings.json @@ -1,12 +1,23 @@ { "config": { "step": { + "reconfigure": { + "description": "Update configuration for {title}.", + "data": { + "host": "[%key:common::config_flow::data::ip%]", + "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "[%key:component::plugwise::config::step::user::data_description::host%]", + "port": "[%key:component::plugwise::config::step::user::data_description::port%]" + } + }, "user": { "title": "Connect to the Smile", "description": "Please enter", "data": { - "password": "Smile ID", "host": "[%key:common::config_flow::data::ip%]", + "password": "Smile ID", "port": "[%key:common::config_flow::data::port%]", "username": "Smile Username" }, @@ -28,7 
+39,9 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", - "anna_with_adam": "Both Anna and Adam detected. Add your Adam instead of your Anna" + "anna_with_adam": "Both Anna and Adam detected. Add your Adam instead of your Anna", + "not_the_same_smile": "The configured Smile ID does not match the Smile ID on the requested IP address.", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "entity": { diff --git a/tests/components/plugwise/conftest.py b/tests/components/plugwise/conftest.py index dead58e0581..e0ada8ea849 100644 --- a/tests/components/plugwise/conftest.py +++ b/tests/components/plugwise/conftest.py @@ -77,9 +77,15 @@ def mock_smile_adam() -> Generator[MagicMock]: """Create a Mock Adam environment for testing exceptions.""" chosen_env = "m_adam_multiple_devices_per_zone" - with patch( - "homeassistant.components.plugwise.coordinator.Smile", autospec=True - ) as smile_mock: + with ( + patch( + "homeassistant.components.plugwise.coordinator.Smile", autospec=True + ) as smile_mock, + patch( + "homeassistant.components.plugwise.config_flow.Smile", + new=smile_mock, + ), + ): smile = smile_mock.return_value smile.gateway_id = "fe799307f1624099878210aa0b9f1475" diff --git a/tests/components/plugwise/test_config_flow.py b/tests/components/plugwise/test_config_flow.py index 9e1e29f4a48..1f30fc972bb 100644 --- a/tests/components/plugwise/test_config_flow.py +++ b/tests/components/plugwise/test_config_flow.py @@ -14,7 +14,7 @@ import pytest from homeassistant.components.plugwise.const import DEFAULT_PORT, DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo -from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF, ConfigFlowResult from homeassistant.const import ( CONF_HOST, CONF_NAME, @@ -35,7 +35,7 @@ TEST_PASSWORD = "test_password" TEST_PORT = 81 TEST_USERNAME = 
"smile" TEST_USERNAME2 = "stretch" -MOCK_SMILE_ID = "smile12345" +TEST_SMILE_HOST = "smile12345" TEST_DISCOVERY = ZeroconfServiceInfo( ip_address=ip_address(TEST_HOST), @@ -129,7 +129,7 @@ async def test_form( assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_smile_config_flow.connect.mock_calls) == 1 - assert result2["result"].unique_id == MOCK_SMILE_ID + assert result2["result"].unique_id == TEST_SMILE_HOST @pytest.mark.parametrize( @@ -175,7 +175,7 @@ async def test_zeroconf_flow( assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_smile_config_flow.connect.mock_calls) == 1 - assert result2["result"].unique_id == MOCK_SMILE_ID + assert result2["result"].unique_id == TEST_SMILE_HOST async def test_zeroconf_flow_stretch( @@ -274,7 +274,7 @@ async def test_flow_errors( side_effect: Exception, reason: str, ) -> None: - """Test we handle invalid auth.""" + """Test we handle each exception error.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={CONF_SOURCE: SOURCE_USER}, @@ -285,6 +285,7 @@ async def test_flow_errors( assert "flow_id" in result mock_smile_config_flow.connect.side_effect = side_effect + result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_HOST: TEST_HOST, CONF_PASSWORD: TEST_PASSWORD}, @@ -330,7 +331,7 @@ async def test_user_abort_existing_anna( CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, }, - unique_id=MOCK_SMILE_ID, + unique_id=TEST_SMILE_HOST, ) entry.add_to_hass(hass) @@ -435,3 +436,91 @@ async def test_zeroconf_abort_anna_with_adam(hass: HomeAssistant) -> None: flows_in_progress = hass.config_entries.flow._handler_progress_index[DOMAIN] assert len(flows_in_progress) == 1 assert list(flows_in_progress)[0].product == "smile_open_therm" + + +async def _start_reconfigure_flow( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + host_ip: str, +) -> ConfigFlowResult: + """Initialize a reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + 
+ reconfigure_result = await mock_config_entry.start_reconfigure_flow(hass) + + assert reconfigure_result["type"] is FlowResultType.FORM + assert reconfigure_result["step_id"] == "reconfigure" + + return await hass.config_entries.flow.async_configure( + reconfigure_result["flow_id"], {CONF_HOST: host_ip} + ) + + +async def test_reconfigure_flow( + hass: HomeAssistant, + mock_smile_adam: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfigure flow.""" + result = await _start_reconfigure_flow(hass, mock_config_entry, TEST_HOST) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + assert mock_config_entry.data.get(CONF_HOST) == TEST_HOST + + +async def test_reconfigure_flow_smile_mismatch( + hass: HomeAssistant, + mock_smile_adam: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfigure flow aborts on other Smile ID.""" + mock_smile_adam.smile_hostname = TEST_SMILE_HOST + + result = await _start_reconfigure_flow(hass, mock_config_entry, TEST_HOST) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "not_the_same_smile" + + +@pytest.mark.parametrize( + ("side_effect", "reason"), + [ + (ConnectionFailedError, "cannot_connect"), + (InvalidAuthentication, "invalid_auth"), + (InvalidSetupError, "invalid_setup"), + (InvalidXMLError, "response_error"), + (RuntimeError, "unknown"), + (UnsupportedDeviceError, "unsupported"), + ], +) +async def test_reconfigure_flow_connect_errors( + hass: HomeAssistant, + mock_smile_adam: AsyncMock, + mock_config_entry: MockConfigEntry, + side_effect: Exception, + reason: str, +) -> None: + """Test we handle each reconfigure exception error and recover.""" + + mock_smile_adam.connect.side_effect = side_effect + + result = await _start_reconfigure_flow(hass, mock_config_entry, TEST_HOST) + + assert result.get("type") is FlowResultType.FORM + assert 
result.get("errors") == {"base": reason} + assert result.get("step_id") == "reconfigure" + + mock_smile_adam.connect.side_effect = None + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: TEST_HOST} + ) + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reconfigure_successful" + + assert mock_config_entry.data.get(CONF_HOST) == TEST_HOST From 944ad9022d349f4e6a1909ad98299246f6fb8bb1 Mon Sep 17 00:00:00 2001 From: "Steven B." <51370195+sdb9696@users.noreply.github.com> Date: Sat, 21 Dec 2024 18:04:09 +0000 Subject: [PATCH 589/677] Bump tplink python-kasa dependency to 0.9.0 (#133735) Release notes: https://github.com/python-kasa/python-kasa/releases/tag/0.9.0 --- homeassistant/components/tplink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tplink/manifest.json b/homeassistant/components/tplink/manifest.json index 6ce46c0d488..65061882027 100644 --- a/homeassistant/components/tplink/manifest.json +++ b/homeassistant/components/tplink/manifest.json @@ -300,5 +300,5 @@ "documentation": "https://www.home-assistant.io/integrations/tplink", "iot_class": "local_polling", "loggers": ["kasa"], - "requirements": ["python-kasa[speedups]==0.8.1"] + "requirements": ["python-kasa[speedups]==0.9.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index e9376072875..b1aa085ee52 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2375,7 +2375,7 @@ python-join-api==0.0.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.8.1 +python-kasa[speedups]==0.9.0 # homeassistant.components.linkplay python-linkplay==0.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c0761fafc8b..3fdd84009fc 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1911,7 +1911,7 @@ python-izone==1.2.9 
python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.8.1 +python-kasa[speedups]==0.9.0 # homeassistant.components.linkplay python-linkplay==0.1.1 From 0037799bfe50e812cb141b4a21b0b8e308609941 Mon Sep 17 00:00:00 2001 From: "Glenn Vandeuren (aka Iondependent)" Date: Sat, 21 Dec 2024 19:28:11 +0100 Subject: [PATCH 590/677] Change niko_home_control library to nhc to get push updates (#132750) Co-authored-by: Joost Lekkerkerker Co-authored-by: VandeurenGlenn <8685280+VandeurenGlenn@users.noreply.github.com> Co-authored-by: Joostlek --- .../components/niko_home_control/__init__.py | 87 +++++++++---------- .../niko_home_control/config_flow.py | 14 +-- .../components/niko_home_control/const.py | 3 + .../components/niko_home_control/light.py | 57 ++++++------ .../niko_home_control/manifest.json | 4 +- homeassistant/generated/integrations.json | 2 +- requirements_all.txt | 6 +- requirements_test_all.txt | 6 +- .../components/niko_home_control/conftest.py | 2 +- .../niko_home_control/test_config_flow.py | 6 +- .../components/niko_home_control/test_init.py | 36 ++++++++ 11 files changed, 131 insertions(+), 92 deletions(-) create mode 100644 tests/components/niko_home_control/test_init.py diff --git a/homeassistant/components/niko_home_control/__init__.py b/homeassistant/components/niko_home_control/__init__.py index bdbb8d6b85f..0bc1b117a70 100644 --- a/homeassistant/components/niko_home_control/__init__.py +++ b/homeassistant/components/niko_home_control/__init__.py @@ -2,35 +2,29 @@ from __future__ import annotations -from datetime import timedelta -import logging - from nclib.errors import NetcatError -from nikohomecontrol import NikoHomeControl +from nhc.controller import NHCController from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.util import Throttle +from 
homeassistant.helpers import entity_registry as er + +from .const import _LOGGER PLATFORMS: list[Platform] = [Platform.LIGHT] -type NikoHomeControlConfigEntry = ConfigEntry[NikoHomeControlData] - - -_LOGGER = logging.getLogger(__name__) -MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1) +type NikoHomeControlConfigEntry = ConfigEntry[NHCController] async def async_setup_entry( hass: HomeAssistant, entry: NikoHomeControlConfigEntry ) -> bool: """Set Niko Home Control from a config entry.""" + controller = NHCController(entry.data[CONF_HOST]) try: - controller = NikoHomeControl({"ip": entry.data[CONF_HOST], "port": 8000}) - niko_data = NikoHomeControlData(hass, controller) - await niko_data.async_update() + await controller.connect() except NetcatError as err: raise ConfigEntryNotReady("cannot connect to controller.") from err except OSError as err: @@ -38,46 +32,45 @@ async def async_setup_entry( "unknown error while connecting to controller." ) from err - entry.runtime_data = niko_data + entry.runtime_data = controller await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True +async def async_migrate_entry( + hass: HomeAssistant, config_entry: NikoHomeControlConfigEntry +) -> bool: + """Migrate old entry.""" + _LOGGER.debug( + "Migrating configuration from version %s.%s", + config_entry.version, + config_entry.minor_version, + ) + + if config_entry.minor_version < 2: + registry = er.async_get(hass) + entries = er.async_entries_for_config_entry(registry, config_entry.entry_id) + + for entry in entries: + if entry.unique_id.startswith("light-"): + action_id = entry.unique_id.split("-")[-1] + new_unique_id = f"{config_entry.entry_id}-{action_id}" + registry.async_update_entity( + entry.entity_id, new_unique_id=new_unique_id + ) + + hass.config_entries.async_update_entry(config_entry, minor_version=2) + + _LOGGER.debug( + "Migration to configuration version %s.%s successful", + config_entry.version, + config_entry.minor_version, + ) + return True 
+ + async def async_unload_entry( hass: HomeAssistant, entry: NikoHomeControlConfigEntry ) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - -class NikoHomeControlData: - """The class for handling data retrieval.""" - - def __init__(self, hass, nhc): - """Set up Niko Home Control Data object.""" - self.nhc = nhc - self.hass = hass - self.available = True - self.data = {} - self._system_info = None - - @Throttle(MIN_TIME_BETWEEN_UPDATES) - async def async_update(self): - """Get the latest data from the NikoHomeControl API.""" - _LOGGER.debug("Fetching async state in bulk") - try: - self.data = await self.hass.async_add_executor_job( - self.nhc.list_actions_raw - ) - self.available = True - except OSError as ex: - _LOGGER.error("Unable to retrieve data from Niko, %s", str(ex)) - self.available = False - - def get_state(self, aid): - """Find and filter state based on action id.""" - for state in self.data: - if state["id"] == aid: - return state["value1"] - _LOGGER.error("Failed to retrieve state off unknown light") - return None diff --git a/homeassistant/components/niko_home_control/config_flow.py b/homeassistant/components/niko_home_control/config_flow.py index 9174a932534..f37e5e9248a 100644 --- a/homeassistant/components/niko_home_control/config_flow.py +++ b/homeassistant/components/niko_home_control/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any -from nikohomecontrol import NikoHomeControlConnection +from nhc.controller import NHCController import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -19,10 +19,12 @@ DATA_SCHEMA = vol.Schema( ) -def test_connection(host: str) -> str | None: +async def test_connection(host: str) -> str | None: """Test if we can connect to the Niko Home Control controller.""" + + controller = NHCController(host, 8000) try: - NikoHomeControlConnection(host, 8000) + await 
controller.connect() except Exception: # noqa: BLE001 return "cannot_connect" return None @@ -31,7 +33,7 @@ def test_connection(host: str) -> str | None: class NikoHomeControlConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Niko Home Control.""" - VERSION = 1 + MINOR_VERSION = 2 async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -41,7 +43,7 @@ class NikoHomeControlConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]}) - error = test_connection(user_input[CONF_HOST]) + error = await test_connection(user_input[CONF_HOST]) if not error: return self.async_create_entry( title="Niko Home Control", @@ -56,7 +58,7 @@ class NikoHomeControlConfigFlow(ConfigFlow, domain=DOMAIN): async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: """Import a config entry.""" self._async_abort_entries_match({CONF_HOST: import_info[CONF_HOST]}) - error = test_connection(import_info[CONF_HOST]) + error = await test_connection(import_info[CONF_HOST]) if not error: return self.async_create_entry( diff --git a/homeassistant/components/niko_home_control/const.py b/homeassistant/components/niko_home_control/const.py index 202b031b9a2..82b7ce7ed38 100644 --- a/homeassistant/components/niko_home_control/const.py +++ b/homeassistant/components/niko_home_control/const.py @@ -1,3 +1,6 @@ """Constants for niko_home_control integration.""" +import logging + DOMAIN = "niko_home_control" +_LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/niko_home_control/light.py b/homeassistant/components/niko_home_control/light.py index f2bf302eab7..29b952fcb77 100644 --- a/homeassistant/components/niko_home_control/light.py +++ b/homeassistant/components/niko_home_control/light.py @@ -2,10 +2,9 @@ from __future__ import annotations -from datetime import timedelta -import logging from typing import Any +from nhc.light import NHCLight import 
voluptuous as vol from homeassistant.components.light import ( @@ -24,12 +23,9 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import NikoHomeControlConfigEntry +from . import NHCController, NikoHomeControlConfigEntry from .const import DOMAIN -_LOGGER = logging.getLogger(__name__) -SCAN_INTERVAL = timedelta(seconds=30) - # delete after 2025.7.0 PLATFORM_SCHEMA = LIGHT_PLATFORM_SCHEMA.extend({vol.Required(CONF_HOST): cv.string}) @@ -87,43 +83,52 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Niko Home Control light entry.""" - niko_data = entry.runtime_data + controller = entry.runtime_data async_add_entities( - NikoHomeControlLight(light, niko_data) for light in niko_data.nhc.list_actions() + NikoHomeControlLight(light, controller, entry.entry_id) + for light in controller.lights ) class NikoHomeControlLight(LightEntity): - """Representation of an Niko Light.""" + """Representation of a Niko Light.""" - def __init__(self, light, data): + def __init__( + self, action: NHCLight, controller: NHCController, unique_id: str + ) -> None: """Set up the Niko Home Control light platform.""" - self._data = data - self._light = light - self._attr_unique_id = f"light-{light.id}" - self._attr_name = light.name - self._attr_is_on = light.is_on + self._controller = controller + self._action = action + self._attr_unique_id = f"{unique_id}-{action.id}" + self._attr_name = action.name + self._attr_is_on = action.is_on self._attr_color_mode = ColorMode.ONOFF self._attr_supported_color_modes = {ColorMode.ONOFF} - if light._state["type"] == 2: # noqa: SLF001 + self._attr_should_poll = False + if action.is_dimmable: self._attr_color_mode = ColorMode.BRIGHTNESS self._attr_supported_color_modes = {ColorMode.BRIGHTNESS} + async def async_added_to_hass(self) -> None: + """Subscribe to 
updates.""" + self.async_on_remove( + self._controller.register_callback( + self._action.id, self.async_update_callback + ) + ) + def turn_on(self, **kwargs: Any) -> None: """Instruct the light to turn on.""" - _LOGGER.debug("Turn on: %s", self.name) - self._light.turn_on(kwargs.get(ATTR_BRIGHTNESS, 255) / 2.55) + self._action.turn_on(kwargs.get(ATTR_BRIGHTNESS, 255) / 2.55) def turn_off(self, **kwargs: Any) -> None: """Instruct the light to turn off.""" - _LOGGER.debug("Turn off: %s", self.name) - self._light.turn_off() + self._action.turn_off() - async def async_update(self) -> None: - """Get the latest data from NikoHomeControl API.""" - await self._data.async_update() - state = self._data.get_state(self._light.id) - self._attr_is_on = state != 0 + async def async_update_callback(self, state: int) -> None: + """Handle updates from the controller.""" + self._attr_is_on = state > 0 if brightness_supported(self.supported_color_modes): - self._attr_brightness = state * 2.55 + self._attr_brightness = round(state * 2.55) + self.async_write_ha_state() diff --git a/homeassistant/components/niko_home_control/manifest.json b/homeassistant/components/niko_home_control/manifest.json index 194596d534f..d252a11b38e 100644 --- a/homeassistant/components/niko_home_control/manifest.json +++ b/homeassistant/components/niko_home_control/manifest.json @@ -4,7 +4,7 @@ "codeowners": ["@VandeurenGlenn"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/niko_home_control", - "iot_class": "local_polling", + "iot_class": "local_push", "loggers": ["nikohomecontrol"], - "requirements": ["niko-home-control==0.2.1"] + "requirements": ["nhc==0.3.2"] } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index f037b8d7ce6..ad4af2f024c 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -4160,7 +4160,7 @@ "name": "Niko Home Control", "integration_type": "hub", 
"config_flow": true, - "iot_class": "local_polling" + "iot_class": "local_push" }, "nilu": { "name": "Norwegian Institute for Air Research (NILU)", diff --git a/requirements_all.txt b/requirements_all.txt index b1aa085ee52..4cf22eaf153 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1463,15 +1463,15 @@ nextcord==2.6.0 # homeassistant.components.nextdns nextdns==4.0.0 +# homeassistant.components.niko_home_control +nhc==0.3.2 + # homeassistant.components.nibe_heatpump nibe==2.14.0 # homeassistant.components.nice_go nice-go==1.0.0 -# homeassistant.components.niko_home_control -niko-home-control==0.2.1 - # homeassistant.components.nilu niluclient==0.1.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3fdd84009fc..747594117e6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1226,15 +1226,15 @@ nextcord==2.6.0 # homeassistant.components.nextdns nextdns==4.0.0 +# homeassistant.components.niko_home_control +nhc==0.3.2 + # homeassistant.components.nibe_heatpump nibe==2.14.0 # homeassistant.components.nice_go nice-go==1.0.0 -# homeassistant.components.niko_home_control -niko-home-control==0.2.1 - # homeassistant.components.nfandroidtv notifications-android-tv==0.1.5 diff --git a/tests/components/niko_home_control/conftest.py b/tests/components/niko_home_control/conftest.py index 932480ac710..63307a88e8a 100644 --- a/tests/components/niko_home_control/conftest.py +++ b/tests/components/niko_home_control/conftest.py @@ -26,7 +26,7 @@ def mock_niko_home_control_connection() -> Generator[AsyncMock]: """Mock a NHC client.""" with ( patch( - "homeassistant.components.niko_home_control.config_flow.NikoHomeControlConnection", + "homeassistant.components.niko_home_control.config_flow.NHCController", autospec=True, ) as mock_client, ): diff --git a/tests/components/niko_home_control/test_config_flow.py b/tests/components/niko_home_control/test_config_flow.py index 8220ee15e02..f911f4ebb1a 100644 --- 
a/tests/components/niko_home_control/test_config_flow.py +++ b/tests/components/niko_home_control/test_config_flow.py @@ -46,7 +46,7 @@ async def test_cannot_connect(hass: HomeAssistant, mock_setup_entry: AsyncMock) assert result["errors"] == {} with patch( - "homeassistant.components.niko_home_control.config_flow.NikoHomeControlConnection", + "homeassistant.components.niko_home_control.config_flow.NHCController.connect", side_effect=Exception, ): result = await hass.config_entries.flow.async_configure( @@ -58,7 +58,7 @@ async def test_cannot_connect(hass: HomeAssistant, mock_setup_entry: AsyncMock) assert result["errors"] == {"base": "cannot_connect"} with patch( - "homeassistant.components.niko_home_control.config_flow.NikoHomeControlConnection" + "homeassistant.components.niko_home_control.config_flow.NHCController.connect", ): result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -114,7 +114,7 @@ async def test_import_cannot_connect( """Test the cannot connect error.""" with patch( - "homeassistant.components.niko_home_control.config_flow.NikoHomeControlConnection", + "homeassistant.components.niko_home_control.config_flow.NHCController.connect", side_effect=Exception, ): result = await hass.config_entries.flow.async_init( diff --git a/tests/components/niko_home_control/test_init.py b/tests/components/niko_home_control/test_init.py new file mode 100644 index 00000000000..422b7d7c30c --- /dev/null +++ b/tests/components/niko_home_control/test_init.py @@ -0,0 +1,36 @@ +"""Test init.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from homeassistant.components.niko_home_control.const import DOMAIN +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry + + +async def test_migrate_entry( + hass: HomeAssistant, entity_registry: er.EntityRegistry, 
mock_setup_entry: AsyncMock +) -> None: + """Validate that the unique_id is migrated to the new unique_id.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + minor_version=1, + data={CONF_HOST: "192.168.0.123"}, + ) + config_entry.add_to_hass(hass) + entity_entry = entity_registry.async_get_or_create( + LIGHT_DOMAIN, DOMAIN, "light-1", config_entry=config_entry + ) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entity_entry = entity_registry.async_get(entity_entry.entity_id) + + assert config_entry.minor_version == 2 + assert ( + entity_registry.async_get(entity_entry.entity_id).unique_id + == f"{config_entry.entry_id}-1" + ) From f2df57e230fef61d5d404c9cd93096429d825da6 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sat, 21 Dec 2024 19:36:13 +0100 Subject: [PATCH 591/677] Add DHCP discovery to Withings (#133737) --- .../components/withings/manifest.json | 5 ++ homeassistant/generated/dhcp.py | 4 ++ tests/components/withings/test_config_flow.py | 65 ++++++++++++++++++- 3 files changed, 73 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/withings/manifest.json b/homeassistant/components/withings/manifest.json index 886eb66f5e0..ad9b9a6fe71 100644 --- a/homeassistant/components/withings/manifest.json +++ b/homeassistant/components/withings/manifest.json @@ -5,6 +5,11 @@ "codeowners": ["@joostlek"], "config_flow": true, "dependencies": ["application_credentials", "http", "webhook"], + "dhcp": [ + { + "macaddress": "0024E4*" + } + ], "documentation": "https://www.home-assistant.io/integrations/withings", "iot_class": "cloud_push", "loggers": ["aiowithings"], diff --git a/homeassistant/generated/dhcp.py b/homeassistant/generated/dhcp.py index 22a09945a80..67531ceced8 100644 --- a/homeassistant/generated/dhcp.py +++ b/homeassistant/generated/dhcp.py @@ -1119,6 +1119,10 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "domain": "vicare", "macaddress": "B87424*", }, + { + "domain": 
"withings", + "macaddress": "0024E4*", + }, { "domain": "wiz", "registered_devices": True, diff --git a/tests/components/withings/test_config_flow.py b/tests/components/withings/test_config_flow.py index 39c8340a78e..d0ad5b2659a 100644 --- a/tests/components/withings/test_config_flow.py +++ b/tests/components/withings/test_config_flow.py @@ -4,8 +4,9 @@ from unittest.mock import AsyncMock, patch import pytest +from homeassistant.components.dhcp import DhcpServiceInfo from homeassistant.components.withings.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -293,3 +294,65 @@ async def test_config_flow_with_invalid_credentials( assert result assert result["type"] is FlowResultType.ABORT assert result["reason"] == "oauth_error" + + +@pytest.mark.usefixtures("current_request_with_host") +async def test_dhcp( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, +) -> None: + """Check DHCP discovery.""" + + service_info = DhcpServiceInfo( + hostname="device", + ip="192.168.0.1", + macaddress="0024e4bd30de", + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_DHCP}, data=service_info + ) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + + assert result["type"] is FlowResultType.EXTERNAL_STEP + assert result["url"] == ( + "https://account.withings.com/oauth2_user/authorize2?" 
+ f"response_type=code&client_id={CLIENT_ID}&" + "redirect_uri=https://example.com/auth/external/callback&" + f"state={state}" + "&scope=user.info,user.metrics,user.activity,user.sleepevents" + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.clear_requests() + aioclient_mock.post( + "https://wbsapi.withings.net/v2/oauth2", + json={ + "body": { + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + "expires_in": 60, + "userid": 600, + }, + }, + ) + with patch( + "homeassistant.components.withings.async_setup_entry", return_value=True + ) as mock_setup: + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup.mock_calls) == 1 + + assert result["type"] is FlowResultType.CREATE_ENTRY From 04276d352317fff120d2c98c446e83d85bb24cd0 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 20:16:18 +0100 Subject: [PATCH 592/677] Add number platform to Peblar Rocksolid EV Chargers integration (#133739) --- homeassistant/components/peblar/__init__.py | 7 +- .../components/peblar/coordinator.py | 27 ++++- .../components/peblar/diagnostics.py | 3 +- homeassistant/components/peblar/icons.json | 5 + homeassistant/components/peblar/number.py | 104 ++++++++++++++++++ homeassistant/components/peblar/sensor.py | 42 ++++--- homeassistant/components/peblar/strings.json | 5 + tests/components/peblar/conftest.py | 4 + .../peblar/fixtures/ev_interface.json | 7 ++ .../peblar/snapshots/test_diagnostics.ambr | 7 ++ .../peblar/snapshots/test_number.ambr | 58 ++++++++++ tests/components/peblar/test_number.py | 35 ++++++ 12 files changed, 273 insertions(+), 31 deletions(-) create mode 100644 homeassistant/components/peblar/number.py create mode 
100644 tests/components/peblar/fixtures/ev_interface.json create mode 100644 tests/components/peblar/snapshots/test_number.ambr create mode 100644 tests/components/peblar/test_number.py diff --git a/homeassistant/components/peblar/__init__.py b/homeassistant/components/peblar/__init__.py index 79ffd236f32..2ab255037ac 100644 --- a/homeassistant/components/peblar/__init__.py +++ b/homeassistant/components/peblar/__init__.py @@ -22,13 +22,14 @@ from homeassistant.helpers.aiohttp_client import async_create_clientsession from .const import DOMAIN from .coordinator import ( PeblarConfigEntry, - PeblarMeterDataUpdateCoordinator, + PeblarDataUpdateCoordinator, PeblarRuntimeData, PeblarUserConfigurationDataUpdateCoordinator, PeblarVersionDataUpdateCoordinator, ) PLATFORMS = [ + Platform.NUMBER, Platform.SELECT, Platform.SENSOR, Platform.UPDATE, @@ -57,7 +58,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bo ) from err # Setup the data coordinators - meter_coordinator = PeblarMeterDataUpdateCoordinator(hass, entry, api) + meter_coordinator = PeblarDataUpdateCoordinator(hass, entry, api) user_configuration_coordinator = PeblarUserConfigurationDataUpdateCoordinator( hass, entry, peblar ) @@ -70,7 +71,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bo # Store the runtime data entry.runtime_data = PeblarRuntimeData( - meter_coordinator=meter_coordinator, + data_coordinator=meter_coordinator, system_information=system_information, user_configuraton_coordinator=user_configuration_coordinator, version_coordinator=version_coordinator, diff --git a/homeassistant/components/peblar/coordinator.py b/homeassistant/components/peblar/coordinator.py index a01e3d6b41a..33c66266e47 100644 --- a/homeassistant/components/peblar/coordinator.py +++ b/homeassistant/components/peblar/coordinator.py @@ -9,6 +9,7 @@ from peblar import ( Peblar, PeblarApi, PeblarError, + PeblarEVInterface, PeblarMeter, PeblarUserConfiguration, 
PeblarVersions, @@ -26,7 +27,7 @@ from .const import LOGGER class PeblarRuntimeData: """Class to hold runtime data.""" - meter_coordinator: PeblarMeterDataUpdateCoordinator + data_coordinator: PeblarDataUpdateCoordinator system_information: PeblarSystemInformation user_configuraton_coordinator: PeblarUserConfigurationDataUpdateCoordinator version_coordinator: PeblarVersionDataUpdateCoordinator @@ -43,6 +44,19 @@ class PeblarVersionInformation: available: PeblarVersions +@dataclass(kw_only=True) +class PeblarData: + """Class to hold active charging related information of Peblar. + + This is data that needs to be polled and updated at a relatively high + frequency in order for this integration to function correctly. + All this data is updated at the same time by a single coordinator. + """ + + ev: PeblarEVInterface + meter: PeblarMeter + + class PeblarVersionDataUpdateCoordinator( DataUpdateCoordinator[PeblarVersionInformation] ): @@ -72,8 +86,8 @@ class PeblarVersionDataUpdateCoordinator( raise UpdateFailed(err) from err -class PeblarMeterDataUpdateCoordinator(DataUpdateCoordinator[PeblarMeter]): - """Class to manage fetching Peblar meter data.""" +class PeblarDataUpdateCoordinator(DataUpdateCoordinator[PeblarData]): + """Class to manage fetching Peblar active data.""" def __init__( self, hass: HomeAssistant, entry: PeblarConfigEntry, api: PeblarApi @@ -88,10 +102,13 @@ class PeblarMeterDataUpdateCoordinator(DataUpdateCoordinator[PeblarMeter]): update_interval=timedelta(seconds=10), ) - async def _async_update_data(self) -> PeblarMeter: + async def _async_update_data(self) -> PeblarData: """Fetch data from the Peblar device.""" try: - return await self.api.meter() + return PeblarData( + ev=await self.api.ev_interface(), + meter=await self.api.meter(), + ) except PeblarError as err: raise UpdateFailed(err) from err diff --git a/homeassistant/components/peblar/diagnostics.py b/homeassistant/components/peblar/diagnostics.py index 6c4531c0e09..ab18956ecbb 100644 --- 
a/homeassistant/components/peblar/diagnostics.py +++ b/homeassistant/components/peblar/diagnostics.py @@ -16,7 +16,8 @@ async def async_get_config_entry_diagnostics( return { "system_information": entry.runtime_data.system_information.to_dict(), "user_configuration": entry.runtime_data.user_configuraton_coordinator.data.to_dict(), - "meter": entry.runtime_data.meter_coordinator.data.to_dict(), + "ev": entry.runtime_data.data_coordinator.data.ev.to_dict(), + "meter": entry.runtime_data.data_coordinator.data.meter.to_dict(), "versions": { "available": entry.runtime_data.version_coordinator.data.available.to_dict(), "current": entry.runtime_data.version_coordinator.data.current.to_dict(), diff --git a/homeassistant/components/peblar/icons.json b/homeassistant/components/peblar/icons.json index b052eb6de4d..3ead366f4bf 100644 --- a/homeassistant/components/peblar/icons.json +++ b/homeassistant/components/peblar/icons.json @@ -1,5 +1,10 @@ { "entity": { + "number": { + "charge_current_limit": { + "default": "mdi:speedometer" + } + }, "select": { "smart_charging": { "default": "mdi:lightning-bolt", diff --git a/homeassistant/components/peblar/number.py b/homeassistant/components/peblar/number.py new file mode 100644 index 00000000000..72c7b02a6e0 --- /dev/null +++ b/homeassistant/components/peblar/number.py @@ -0,0 +1,104 @@ +"""Support for Peblar numbers.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import Any + +from peblar import PeblarApi + +from homeassistant.components.number import ( + NumberDeviceClass, + NumberEntity, + NumberEntityDescription, +) +from homeassistant.const import EntityCategory, UnitOfElectricCurrent +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + 
+from .const import DOMAIN +from .coordinator import ( + PeblarConfigEntry, + PeblarData, + PeblarDataUpdateCoordinator, + PeblarRuntimeData, +) + + +@dataclass(frozen=True, kw_only=True) +class PeblarNumberEntityDescription(NumberEntityDescription): + """Describe a Peblar number.""" + + native_max_value_fn: Callable[[PeblarRuntimeData], int] + set_value_fn: Callable[[PeblarApi, float], Awaitable[Any]] + value_fn: Callable[[PeblarData], int | None] + + +DESCRIPTIONS = [ + PeblarNumberEntityDescription( + key="charge_current_limit", + translation_key="charge_current_limit", + device_class=NumberDeviceClass.CURRENT, + entity_category=EntityCategory.CONFIG, + native_step=1, + native_min_value=6, + native_max_value_fn=lambda x: x.system_information.hardware_max_current, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + set_value_fn=lambda x, v: x.ev_interface(charge_current_limit=int(v) * 1000), + value_fn=lambda x: round(x.ev.charge_current_limit_actual / 1000), + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PeblarConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Peblar number based on a config entry.""" + async_add_entities( + PeblarNumberEntity( + entry=entry, + description=description, + ) + for description in DESCRIPTIONS + ) + + +class PeblarNumberEntity(CoordinatorEntity[PeblarDataUpdateCoordinator], NumberEntity): + """Defines a Peblar number.""" + + entity_description: PeblarNumberEntityDescription + + _attr_has_entity_name = True + + def __init__( + self, + entry: PeblarConfigEntry, + description: PeblarNumberEntityDescription, + ) -> None: + """Initialize the Peblar entity.""" + super().__init__(entry.runtime_data.data_coordinator) + self.entity_description = description + self._attr_unique_id = f"{entry.unique_id}_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={ + (DOMAIN, entry.runtime_data.system_information.product_serial_number) + }, + ) + 
self._attr_native_max_value = description.native_max_value_fn( + entry.runtime_data + ) + + @property + def native_value(self) -> int | None: + """Return the number value.""" + return self.entity_description.value_fn(self.coordinator.data) + + async def async_set_native_value(self, value: float) -> None: + """Change to new number value.""" + await self.entity_description.set_value_fn(self.coordinator.api, value) + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/peblar/sensor.py b/homeassistant/components/peblar/sensor.py index bb9fe9d4937..285a8dd5ea0 100644 --- a/homeassistant/components/peblar/sensor.py +++ b/homeassistant/components/peblar/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from peblar import PeblarMeter, PeblarUserConfiguration +from peblar import PeblarUserConfiguration from homeassistant.components.sensor import ( SensorDeviceClass, @@ -26,15 +26,15 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN -from .coordinator import PeblarConfigEntry, PeblarMeterDataUpdateCoordinator +from .coordinator import PeblarConfigEntry, PeblarData, PeblarDataUpdateCoordinator @dataclass(frozen=True, kw_only=True) class PeblarSensorDescription(SensorEntityDescription): - """Describe an Peblar sensor.""" + """Describe a Peblar sensor.""" has_fn: Callable[[PeblarUserConfiguration], bool] = lambda _: True - value_fn: Callable[[PeblarMeter], int | None] + value_fn: Callable[[PeblarData], int | None] DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( @@ -48,7 +48,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] 
= ( state_class=SensorStateClass.MEASUREMENT, suggested_display_precision=1, suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - value_fn=lambda x: x.current_phase_1, + value_fn=lambda x: x.meter.current_phase_1, ), PeblarSensorDescription( key="current_phase_1", @@ -61,7 +61,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( state_class=SensorStateClass.MEASUREMENT, suggested_display_precision=1, suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - value_fn=lambda x: x.current_phase_1, + value_fn=lambda x: x.meter.current_phase_1, ), PeblarSensorDescription( key="current_phase_2", @@ -74,7 +74,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( state_class=SensorStateClass.MEASUREMENT, suggested_display_precision=1, suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - value_fn=lambda x: x.current_phase_2, + value_fn=lambda x: x.meter.current_phase_2, ), PeblarSensorDescription( key="current_phase_3", @@ -87,7 +87,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( state_class=SensorStateClass.MEASUREMENT, suggested_display_precision=1, suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - value_fn=lambda x: x.current_phase_3, + value_fn=lambda x: x.meter.current_phase_3, ), PeblarSensorDescription( key="energy_session", @@ -97,7 +97,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( state_class=SensorStateClass.TOTAL_INCREASING, suggested_display_precision=2, suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - value_fn=lambda x: x.energy_session, + value_fn=lambda x: x.meter.energy_session, ), PeblarSensorDescription( key="energy_total", @@ -108,14 +108,14 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] 
= ( state_class=SensorStateClass.TOTAL_INCREASING, suggested_display_precision=2, suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - value_fn=lambda x: x.energy_total, + value_fn=lambda x: x.meter.energy_total, ), PeblarSensorDescription( key="power_total", device_class=SensorDeviceClass.POWER, native_unit_of_measurement=UnitOfPower.WATT, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda x: x.power_total, + value_fn=lambda x: x.meter.power_total, ), PeblarSensorDescription( key="power_phase_1", @@ -126,7 +126,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( has_fn=lambda x: x.connected_phases >= 2, native_unit_of_measurement=UnitOfPower.WATT, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda x: x.power_phase_1, + value_fn=lambda x: x.meter.power_phase_1, ), PeblarSensorDescription( key="power_phase_2", @@ -137,7 +137,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( has_fn=lambda x: x.connected_phases >= 2, native_unit_of_measurement=UnitOfPower.WATT, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda x: x.power_phase_2, + value_fn=lambda x: x.meter.power_phase_2, ), PeblarSensorDescription( key="power_phase_3", @@ -148,7 +148,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( has_fn=lambda x: x.connected_phases == 3, native_unit_of_measurement=UnitOfPower.WATT, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda x: x.power_phase_3, + value_fn=lambda x: x.meter.power_phase_3, ), PeblarSensorDescription( key="voltage", @@ -158,7 +158,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( has_fn=lambda x: x.connected_phases == 1, native_unit_of_measurement=UnitOfElectricPotential.VOLT, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda x: x.voltage_phase_1, + value_fn=lambda x: x.meter.voltage_phase_1, ), PeblarSensorDescription( key="voltage_phase_1", @@ -169,7 +169,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] 
= ( has_fn=lambda x: x.connected_phases >= 2, native_unit_of_measurement=UnitOfElectricPotential.VOLT, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda x: x.voltage_phase_1, + value_fn=lambda x: x.meter.voltage_phase_1, ), PeblarSensorDescription( key="voltage_phase_2", @@ -180,7 +180,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( has_fn=lambda x: x.connected_phases >= 2, native_unit_of_measurement=UnitOfElectricPotential.VOLT, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda x: x.voltage_phase_2, + value_fn=lambda x: x.meter.voltage_phase_2, ), PeblarSensorDescription( key="voltage_phase_3", @@ -191,7 +191,7 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] = ( has_fn=lambda x: x.connected_phases == 3, native_unit_of_measurement=UnitOfElectricPotential.VOLT, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda x: x.voltage_phase_3, + value_fn=lambda x: x.meter.voltage_phase_3, ), ) @@ -209,9 +209,7 @@ async def async_setup_entry( ) -class PeblarSensorEntity( - CoordinatorEntity[PeblarMeterDataUpdateCoordinator], SensorEntity -): +class PeblarSensorEntity(CoordinatorEntity[PeblarDataUpdateCoordinator], SensorEntity): """Defines a Peblar sensor.""" entity_description: PeblarSensorDescription @@ -224,7 +222,7 @@ class PeblarSensorEntity( description: PeblarSensorDescription, ) -> None: """Initialize the Peblar entity.""" - super().__init__(entry.runtime_data.meter_coordinator) + super().__init__(entry.runtime_data.data_coordinator) self.entity_description = description self._attr_unique_id = f"{entry.unique_id}_{description.key}" self._attr_device_info = DeviceInfo( diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index 02aee0eacc9..e4311df17cd 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -33,6 +33,11 @@ } }, "entity": { + "number": { + "charge_current_limit": { + "name": "Charge limit" + } + }, 
"select": { "smart_charging": { "name": "Smart charging", diff --git a/tests/components/peblar/conftest.py b/tests/components/peblar/conftest.py index 8831697f74e..b8e77da08cd 100644 --- a/tests/components/peblar/conftest.py +++ b/tests/components/peblar/conftest.py @@ -7,6 +7,7 @@ from contextlib import nullcontext from unittest.mock import MagicMock, patch from peblar import ( + PeblarEVInterface, PeblarMeter, PeblarSystemInformation, PeblarUserConfiguration, @@ -64,6 +65,9 @@ def mock_peblar() -> Generator[MagicMock]: ) api = peblar.rest_api.return_value + api.ev_interface.return_value = PeblarEVInterface.from_json( + load_fixture("ev_interface.json", DOMAIN) + ) api.meter.return_value = PeblarMeter.from_json( load_fixture("meter.json", DOMAIN) ) diff --git a/tests/components/peblar/fixtures/ev_interface.json b/tests/components/peblar/fixtures/ev_interface.json new file mode 100644 index 00000000000..901807a7068 --- /dev/null +++ b/tests/components/peblar/fixtures/ev_interface.json @@ -0,0 +1,7 @@ +{ + "ChargeCurrentLimit": 16000, + "ChargeCurrentLimitActual": 6000, + "ChargeCurrentLimitSource": "Current limiter", + "CpState": "State C", + "Force1Phase": false +} diff --git a/tests/components/peblar/snapshots/test_diagnostics.ambr b/tests/components/peblar/snapshots/test_diagnostics.ambr index 08d4d3ac6c6..625bb196402 100644 --- a/tests/components/peblar/snapshots/test_diagnostics.ambr +++ b/tests/components/peblar/snapshots/test_diagnostics.ambr @@ -1,6 +1,13 @@ # serializer version: 1 # name: test_diagnostics dict({ + 'ev': dict({ + 'ChargeCurrentLimit': 16000, + 'ChargeCurrentLimitActual': 6000, + 'ChargeCurrentLimitSource': 'Current limiter', + 'CpState': 'State C', + 'Force1Phase': False, + }), 'meter': dict({ 'CurrentPhase1': 14242, 'CurrentPhase2': 0, diff --git a/tests/components/peblar/snapshots/test_number.ambr b/tests/components/peblar/snapshots/test_number.ambr new file mode 100644 index 00000000000..50b44583d1c --- /dev/null +++ 
b/tests/components/peblar/snapshots/test_number.ambr @@ -0,0 +1,58 @@ +# serializer version: 1 +# name: test_entities[number][number.peblar_ev_charger_charge_limit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 16, + 'min': 6, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.peblar_ev_charger_charge_limit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charge limit', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_current_limit', + 'unique_id': '23-45-A4O-MOF_charge_current_limit', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[number][number.peblar_ev_charger_charge_limit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Peblar EV Charger Charge limit', + 'max': 16, + 'min': 6, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.peblar_ev_charger_charge_limit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6', + }) +# --- diff --git a/tests/components/peblar/test_number.py b/tests/components/peblar/test_number.py new file mode 100644 index 00000000000..4c2ff928210 --- /dev/null +++ b/tests/components/peblar/test_number.py @@ -0,0 +1,35 @@ +"""Tests for the Peblar number platform.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import 
MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize("init_integration", [Platform.NUMBER], indirect=True) +@pytest.mark.usefixtures("init_integration") +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the number entities.""" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + # Ensure all entities are correctly assigned to the Peblar device + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, "23-45-A4O-MOF")} + ) + assert device_entry + entity_entries = er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + for entity_entry in entity_entries: + assert entity_entry.device_id == device_entry.id From c67e2047e327910325fb5e7ea832f1b1adc638d9 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 20:28:55 +0100 Subject: [PATCH 593/677] Add switch platform to Peblar Rocksolid EV Chargers integration (#133749) --- homeassistant/components/peblar/__init__.py | 1 + .../components/peblar/coordinator.py | 3 + .../components/peblar/diagnostics.py | 1 + homeassistant/components/peblar/icons.json | 5 + homeassistant/components/peblar/strings.json | 5 + homeassistant/components/peblar/switch.py | 102 ++++++++++++++++++ tests/components/peblar/conftest.py | 4 + tests/components/peblar/fixtures/system.json | 12 +++ .../peblar/snapshots/test_diagnostics.ambr | 12 +++ .../peblar/snapshots/test_switch.ambr | 47 ++++++++ tests/components/peblar/test_switch.py | 35 ++++++ 11 files changed, 227 insertions(+) create mode 100644 homeassistant/components/peblar/switch.py create mode 100644 tests/components/peblar/fixtures/system.json create mode 100644 tests/components/peblar/snapshots/test_switch.ambr create mode 100644 tests/components/peblar/test_switch.py diff --git 
a/homeassistant/components/peblar/__init__.py b/homeassistant/components/peblar/__init__.py index 2ab255037ac..854565081e8 100644 --- a/homeassistant/components/peblar/__init__.py +++ b/homeassistant/components/peblar/__init__.py @@ -32,6 +32,7 @@ PLATFORMS = [ Platform.NUMBER, Platform.SELECT, Platform.SENSOR, + Platform.SWITCH, Platform.UPDATE, ] diff --git a/homeassistant/components/peblar/coordinator.py b/homeassistant/components/peblar/coordinator.py index 33c66266e47..e2b16e1e62a 100644 --- a/homeassistant/components/peblar/coordinator.py +++ b/homeassistant/components/peblar/coordinator.py @@ -11,6 +11,7 @@ from peblar import ( PeblarError, PeblarEVInterface, PeblarMeter, + PeblarSystem, PeblarUserConfiguration, PeblarVersions, ) @@ -55,6 +56,7 @@ class PeblarData: ev: PeblarEVInterface meter: PeblarMeter + system: PeblarSystem class PeblarVersionDataUpdateCoordinator( @@ -108,6 +110,7 @@ class PeblarDataUpdateCoordinator(DataUpdateCoordinator[PeblarData]): return PeblarData( ev=await self.api.ev_interface(), meter=await self.api.meter(), + system=await self.api.system(), ) except PeblarError as err: raise UpdateFailed(err) from err diff --git a/homeassistant/components/peblar/diagnostics.py b/homeassistant/components/peblar/diagnostics.py index ab18956ecbb..32716148c3f 100644 --- a/homeassistant/components/peblar/diagnostics.py +++ b/homeassistant/components/peblar/diagnostics.py @@ -18,6 +18,7 @@ async def async_get_config_entry_diagnostics( "user_configuration": entry.runtime_data.user_configuraton_coordinator.data.to_dict(), "ev": entry.runtime_data.data_coordinator.data.ev.to_dict(), "meter": entry.runtime_data.data_coordinator.data.meter.to_dict(), + "system": entry.runtime_data.data_coordinator.data.system.to_dict(), "versions": { "available": entry.runtime_data.version_coordinator.data.available.to_dict(), "current": entry.runtime_data.version_coordinator.data.current.to_dict(), diff --git a/homeassistant/components/peblar/icons.json 
b/homeassistant/components/peblar/icons.json index 3ead366f4bf..2da03b00519 100644 --- a/homeassistant/components/peblar/icons.json +++ b/homeassistant/components/peblar/icons.json @@ -16,6 +16,11 @@ } } }, + "switch": { + "force_single_phase": { + "default": "mdi:power-cycle" + } + }, "update": { "customization": { "default": "mdi:palette" diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index e4311df17cd..e7e531f3bf7 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -85,6 +85,11 @@ "name": "Voltage phase 3" } }, + "switch": { + "force_single_phase": { + "name": "Force single phase" + } + }, "update": { "customization": { "name": "Customization" diff --git a/homeassistant/components/peblar/switch.py b/homeassistant/components/peblar/switch.py new file mode 100644 index 00000000000..9a6788a62be --- /dev/null +++ b/homeassistant/components/peblar/switch.py @@ -0,0 +1,102 @@ +"""Support for Peblar switches.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import Any + +from peblar import PeblarApi + +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity +
from .const import DOMAIN +from .coordinator import ( + PeblarConfigEntry, + PeblarData, + PeblarDataUpdateCoordinator, + PeblarRuntimeData, +) + + +@dataclass(frozen=True, kw_only=True) +class PeblarSwitchEntityDescription(SwitchEntityDescription): + """Class describing Peblar switch entities.""" + + has_fn: Callable[[PeblarRuntimeData], bool] = lambda x: True + is_on_fn: 
Callable[[PeblarData], bool] + set_fn: Callable[[PeblarApi, bool], Awaitable[Any]] + + +DESCRIPTIONS = [ + PeblarSwitchEntityDescription( + key="force_single_phase", + translation_key="force_single_phase", + entity_category=EntityCategory.CONFIG, + has_fn=lambda x: ( + x.data_coordinator.data.system.force_single_phase_allowed + and x.user_configuraton_coordinator.data.connected_phases > 1 + ), + is_on_fn=lambda x: x.ev.force_single_phase, + set_fn=lambda x, on: x.ev_interface(force_single_phase=on), + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PeblarConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Peblar switch based on a config entry.""" + async_add_entities( + PeblarSwitchEntity( + entry=entry, + description=description, + ) + for description in DESCRIPTIONS + if description.has_fn(entry.runtime_data) + ) + + +class PeblarSwitchEntity(CoordinatorEntity[PeblarDataUpdateCoordinator], SwitchEntity): + """Defines a Peblar switch entity.""" + + entity_description: PeblarSwitchEntityDescription + + _attr_has_entity_name = True + + def __init__( + self, + entry: PeblarConfigEntry, + description: PeblarSwitchEntityDescription, + ) -> None: + """Initialize the switch entity.""" + super().__init__(entry.runtime_data.data_coordinator) + self.entity_description = description + self._attr_unique_id = f"{entry.unique_id}-{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={ + (DOMAIN, entry.runtime_data.system_information.product_serial_number) + }, + ) + + @property + def is_on(self) -> bool: + """Return state of the switch.""" + return self.entity_description.is_on_fn(self.coordinator.data) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the entity on.""" + await self.entity_description.set_fn(self.coordinator.api, True) + await self.coordinator.async_request_refresh() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the entity off.""" + await 
self.entity_description.set_fn(self.coordinator.api, False) + await self.coordinator.async_request_refresh() diff --git a/tests/components/peblar/conftest.py b/tests/components/peblar/conftest.py index b8e77da08cd..95daad545b5 100644 --- a/tests/components/peblar/conftest.py +++ b/tests/components/peblar/conftest.py @@ -9,6 +9,7 @@ from unittest.mock import MagicMock, patch from peblar import ( PeblarEVInterface, PeblarMeter, + PeblarSystem, PeblarSystemInformation, PeblarUserConfiguration, PeblarVersions, @@ -71,6 +72,9 @@ def mock_peblar() -> Generator[MagicMock]: api.meter.return_value = PeblarMeter.from_json( load_fixture("meter.json", DOMAIN) ) + api.system.return_value = PeblarSystem.from_json( + load_fixture("system.json", DOMAIN) + ) yield peblar diff --git a/tests/components/peblar/fixtures/system.json b/tests/components/peblar/fixtures/system.json new file mode 100644 index 00000000000..87bb60575da --- /dev/null +++ b/tests/components/peblar/fixtures/system.json @@ -0,0 +1,12 @@ +{ + "ActiveErrorCodes": [], + "ActiveWarningCodes": [], + "CellularSignalStrength": null, + "FirmwareVersion": "1.6.1+1+WL-1", + "Force1PhaseAllowed": true, + "PhaseCount": 3, + "ProductPn": "6004-2300-8002", + "ProductSn": "23-45-A4O-MOF", + "Uptime": 322094, + "WlanSignalStrength": null +} diff --git a/tests/components/peblar/snapshots/test_diagnostics.ambr b/tests/components/peblar/snapshots/test_diagnostics.ambr index 625bb196402..e33a2f557de 100644 --- a/tests/components/peblar/snapshots/test_diagnostics.ambr +++ b/tests/components/peblar/snapshots/test_diagnostics.ambr @@ -20,6 +20,18 @@ 'PowerTotal': 3185, 'VoltagePhase1': 223, }), + 'system': dict({ + 'ActiveErrorCodes': list([ + ]), + 'ActiveWarningCodes': list([ + ]), + 'FirmwareVersion': '1.6.1+1+WL-1', + 'Force1PhaseAllowed': True, + 'PhaseCount': 3, + 'ProductPn': '6004-2300-8002', + 'ProductSn': '23-45-A4O-MOF', + 'Uptime': 322094, + }), 'system_information': dict({ 'BopCalIGainA': 264625, 'BopCalIGainB': 267139, 
diff --git a/tests/components/peblar/snapshots/test_switch.ambr b/tests/components/peblar/snapshots/test_switch.ambr new file mode 100644 index 00000000000..f4fc768030f --- /dev/null +++ b/tests/components/peblar/snapshots/test_switch.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_entities[switch][switch.peblar_ev_charger_force_single_phase-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.peblar_ev_charger_force_single_phase', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Force single phase', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'force_single_phase', + 'unique_id': '23-45-A4O-MOF-force_single_phase', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[switch][switch.peblar_ev_charger_force_single_phase-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Peblar EV Charger Force single phase', + }), + 'context': , + 'entity_id': 'switch.peblar_ev_charger_force_single_phase', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/peblar/test_switch.py b/tests/components/peblar/test_switch.py new file mode 100644 index 00000000000..7a8fcf7705b --- /dev/null +++ b/tests/components/peblar/test_switch.py @@ -0,0 +1,35 @@ +"""Tests for the Peblar switch platform.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import 
device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize("init_integration", [Platform.SWITCH], indirect=True) +@pytest.mark.usefixtures("init_integration") +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the switch entities.""" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + # Ensure all entities are correctly assigned to the Peblar device + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, "23-45-A4O-MOF")} + ) + assert device_entry + entity_entries = er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + for entity_entry in entity_entries: + assert entity_entry.device_id == device_entry.id From 81ce5f45059e2d819efd5ef1fad6df239652999e Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 21:26:40 +0100 Subject: [PATCH 594/677] Update peblar to v0.3.0 (#133751) --- homeassistant/components/peblar/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/peblar/manifest.json b/homeassistant/components/peblar/manifest.json index 1ae2a491ba9..0e3a66dd256 100644 --- a/homeassistant/components/peblar/manifest.json +++ b/homeassistant/components/peblar/manifest.json @@ -7,6 +7,6 @@ "integration_type": "device", "iot_class": "local_polling", "quality_scale": "bronze", - "requirements": ["peblar==0.2.1"], + "requirements": ["peblar==0.3.0"], "zeroconf": [{ "type": "_http._tcp.local.", "name": "pblr-*" }] } diff --git a/requirements_all.txt b/requirements_all.txt index 4cf22eaf153..e7f4aadfe05 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1600,7 +1600,7 @@ panasonic-viera==0.4.2 
pdunehd==1.3.2 # homeassistant.components.peblar -peblar==0.2.1 +peblar==0.3.0 # homeassistant.components.peco peco==0.0.30 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 747594117e6..84fc0f11967 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1327,7 +1327,7 @@ panasonic-viera==0.4.2 pdunehd==1.3.2 # homeassistant.components.peblar -peblar==0.2.1 +peblar==0.3.0 # homeassistant.components.peco peco==0.0.30 From 85519a312c0e8ecfbc2c1c219aa00627f2f4ee31 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 22:23:36 +0100 Subject: [PATCH 595/677] Fix Peblar current limit user setting value (#133753) --- homeassistant/components/peblar/number.py | 2 +- tests/components/peblar/snapshots/test_number.ambr | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/peblar/number.py b/homeassistant/components/peblar/number.py index 72c7b02a6e0..a5e926714d9 100644 --- a/homeassistant/components/peblar/number.py +++ b/homeassistant/components/peblar/number.py @@ -48,7 +48,7 @@ DESCRIPTIONS = [ native_max_value_fn=lambda x: x.system_information.hardware_max_current, native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, set_value_fn=lambda x, v: x.ev_interface(charge_current_limit=int(v) * 1000), - value_fn=lambda x: round(x.ev.charge_current_limit_actual / 1000), + value_fn=lambda x: round(x.ev.charge_current_limit / 1000), ), ] diff --git a/tests/components/peblar/snapshots/test_number.ambr b/tests/components/peblar/snapshots/test_number.ambr index 50b44583d1c..d78067849f3 100644 --- a/tests/components/peblar/snapshots/test_number.ambr +++ b/tests/components/peblar/snapshots/test_number.ambr @@ -53,6 +53,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '6', + 'state': '16', }) # --- From 5e4e1ce5a7962d71ac78db62772db69cab27e045 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 22:29:11 +0100 Subject: [PATCH 596/677] Add binary sensor 
platform to Peblar Rocksolid EV Chargers integration (#133755) --- homeassistant/components/peblar/__init__.py | 1 + .../components/peblar/binary_sensor.py | 89 +++++++++++++++++ homeassistant/components/peblar/icons.json | 8 ++ homeassistant/components/peblar/strings.json | 8 ++ .../peblar/snapshots/test_binary_sensor.ambr | 95 +++++++++++++++++++ tests/components/peblar/test_binary_sensor.py | 35 +++++++ 6 files changed, 236 insertions(+) create mode 100644 homeassistant/components/peblar/binary_sensor.py create mode 100644 tests/components/peblar/snapshots/test_binary_sensor.ambr create mode 100644 tests/components/peblar/test_binary_sensor.py diff --git a/homeassistant/components/peblar/__init__.py b/homeassistant/components/peblar/__init__.py index 854565081e8..43c48e28bd0 100644 --- a/homeassistant/components/peblar/__init__.py +++ b/homeassistant/components/peblar/__init__.py @@ -29,6 +29,7 @@ from .coordinator import ( ) PLATFORMS = [ + Platform.BINARY_SENSOR, Platform.NUMBER, Platform.SELECT, Platform.SENSOR, diff --git a/homeassistant/components/peblar/binary_sensor.py b/homeassistant/components/peblar/binary_sensor.py new file mode 100644 index 00000000000..f28a02422a9 --- /dev/null +++ b/homeassistant/components/peblar/binary_sensor.py @@ -0,0 +1,89 @@ +"""Support for Peblar binary sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import PeblarConfigEntry, PeblarData, PeblarDataUpdateCoordinator + + 
+@dataclass(frozen=True, kw_only=True) +class PeblarBinarySensorEntityDescription(BinarySensorEntityDescription): + """Class describing Peblar binary sensor entities.""" + + is_on_fn: Callable[[PeblarData], bool] + + +DESCRIPTIONS = [ + PeblarBinarySensorEntityDescription( + key="active_error_codes", + translation_key="active_error_codes", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + is_on_fn=lambda x: bool(x.system.active_error_codes), + ), + PeblarBinarySensorEntityDescription( + key="active_warning_codes", + translation_key="active_warning_codes", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + is_on_fn=lambda x: bool(x.system.active_warning_codes), + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PeblarConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Peblar binary sensor based on a config entry.""" + async_add_entities( + PeblarBinarySensorEntity(entry=entry, description=description) + for description in DESCRIPTIONS + ) + + +class PeblarBinarySensorEntity( + CoordinatorEntity[PeblarDataUpdateCoordinator], BinarySensorEntity +): + """Defines a Peblar binary sensor entity.""" + + entity_description: PeblarBinarySensorEntityDescription + + _attr_has_entity_name = True + + def __init__( + self, + entry: PeblarConfigEntry, + description: PeblarBinarySensorEntityDescription, + ) -> None: + """Initialize the binary sensor entity.""" + super().__init__(entry.runtime_data.data_coordinator) + self.entity_description = description + self._attr_unique_id = f"{entry.unique_id}-{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={ + (DOMAIN, entry.runtime_data.system_information.product_serial_number) + }, + ) + + @property + def is_on(self) -> bool: + """Return state of the binary sensor.""" + return 
self.entity_description.is_on_fn(self.coordinator.data) diff --git a/homeassistant/components/peblar/icons.json b/homeassistant/components/peblar/icons.json index 2da03b00519..2b24bf71ebc 100644 --- a/homeassistant/components/peblar/icons.json +++ b/homeassistant/components/peblar/icons.json @@ -1,5 +1,13 @@ { "entity": { + "binary_sensor": { + "active_error_codes": { + "default": "mdi:alert" + }, + "active_warning_codes": { + "default": "mdi:alert" + } + }, "number": { "charge_current_limit": { "default": "mdi:speedometer" diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index e7e531f3bf7..0632fa31dd0 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -33,6 +33,14 @@ } }, "entity": { + "binary_sensor": { + "active_error_codes": { + "name": "Active errors" + }, + "active_warning_codes": { + "name": "Active warnings" + } + }, "number": { "charge_current_limit": { "name": "Charge limit" diff --git a/tests/components/peblar/snapshots/test_binary_sensor.ambr b/tests/components/peblar/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..5dd008dd320 --- /dev/null +++ b/tests/components/peblar/snapshots/test_binary_sensor.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_entities[binary_sensor][binary_sensor.peblar_ev_charger_active_errors-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.peblar_ev_charger_active_errors', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active errors', + 'platform': 'peblar', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'active_error_codes', + 'unique_id': '23-45-A4O-MOF-active_error_codes', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[binary_sensor][binary_sensor.peblar_ev_charger_active_errors-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Peblar EV Charger Active errors', + }), + 'context': , + 'entity_id': 'binary_sensor.peblar_ev_charger_active_errors', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entities[binary_sensor][binary_sensor.peblar_ev_charger_active_warnings-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.peblar_ev_charger_active_warnings', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Active warnings', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'active_warning_codes', + 'unique_id': '23-45-A4O-MOF-active_warning_codes', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[binary_sensor][binary_sensor.peblar_ev_charger_active_warnings-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Peblar EV Charger Active warnings', + }), + 'context': , + 'entity_id': 'binary_sensor.peblar_ev_charger_active_warnings', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/peblar/test_binary_sensor.py b/tests/components/peblar/test_binary_sensor.py new file mode 100644 index 00000000000..670b5b67145 --- /dev/null +++ 
b/tests/components/peblar/test_binary_sensor.py @@ -0,0 +1,35 @@ +"""Tests for the Peblar binary sensor platform.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize("init_integration", [Platform.BINARY_SENSOR], indirect=True) +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration") +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the binary sensors entities.""" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + # Ensure all entities are correctly assigned to the Peblar device + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, "23-45-A4O-MOF")} + ) + assert device_entry + entity_entries = er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + for entity_entry in entity_entries: + assert entity_entry.device_id == device_entry.id From 9dc20b5709b7bfe2c11e1e518a9d86eb81d8f143 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 22:40:15 +0100 Subject: [PATCH 597/677] Add more sensors to Peblar Rocksolid EV Chargers integration (#133754) Co-authored-by: Joost Lekkerkerker --- homeassistant/components/peblar/const.py | 32 +++ homeassistant/components/peblar/icons.json | 11 + homeassistant/components/peblar/sensor.py | 44 ++- homeassistant/components/peblar/strings.json | 61 ++++- .../peblar/snapshots/test_sensor.ambr | 256 ++++++++++++++++++ tests/components/peblar/test_sensor.py | 1 + 6 files changed, 386 insertions(+), 19 deletions(-) 
diff --git a/homeassistant/components/peblar/const.py b/homeassistant/components/peblar/const.py index b986c866d16..d7d7c2fa5b5 100644 --- a/homeassistant/components/peblar/const.py +++ b/homeassistant/components/peblar/const.py @@ -5,6 +5,38 @@ from __future__ import annotations import logging from typing import Final +from peblar import ChargeLimiter, CPState + DOMAIN: Final = "peblar" LOGGER = logging.getLogger(__package__) + +PEBLAR_CHARGE_LIMITER_TO_HOME_ASSISTANT = { + ChargeLimiter.CHARGING_CABLE: "charging_cable", + ChargeLimiter.CURRENT_LIMITER: "current_limiter", + ChargeLimiter.DYNAMIC_LOAD_BALANCING: "dynamic_load_balancing", + ChargeLimiter.EXTERNAL_POWER_LIMIT: "external_power_limit", + ChargeLimiter.GROUP_LOAD_BALANCING: "group_load_balancing", + ChargeLimiter.HARDWARE_LIMITATION: "hardware_limitation", + ChargeLimiter.HIGH_TEMPERATURE: "high_temperature", + ChargeLimiter.HOUSEHOLD_POWER_LIMIT: "household_power_limit", + ChargeLimiter.INSTALLATION_LIMIT: "installation_limit", + ChargeLimiter.LOCAL_MODBUS_API: "local_modbus_api", + ChargeLimiter.LOCAL_REST_API: "local_rest_api", + ChargeLimiter.LOCAL_SCHEDULED: "local_scheduled", + ChargeLimiter.OCPP_SMART_CHARGING: "ocpp_smart_charging", + ChargeLimiter.OVERCURRENT_PROTECTION: "overcurrent_protection", + ChargeLimiter.PHASE_IMBALANCE: "phase_imbalance", + ChargeLimiter.POWER_FACTOR: "power_factor", + ChargeLimiter.SOLAR_CHARGING: "solar_charging", +} + +PEBLAR_CP_STATE_TO_HOME_ASSISTANT = { + CPState.CHARGING_SUSPENDED: "suspended", + CPState.CHARGING_VENTILATION: "charging", + CPState.CHARGING: "charging", + CPState.ERROR: "error", + CPState.FAULT: "fault", + CPState.INVALID: "invalid", + CPState.NO_EV_CONNECTED: "no_ev_connected", +} diff --git a/homeassistant/components/peblar/icons.json b/homeassistant/components/peblar/icons.json index 2b24bf71ebc..6244945077b 100644 --- a/homeassistant/components/peblar/icons.json +++ b/homeassistant/components/peblar/icons.json @@ -24,6 +24,17 @@ } } }, + 
"sensor": { + "cp_state": { + "default": "mdi:ev-plug-type2" + }, + "charge_current_limit_source": { + "default": "mdi:arrow-collapse-up" + }, + "uptime": { + "default": "mdi:timer" + } + }, "switch": { "force_single_phase": { "default": "mdi:power-cycle" diff --git a/homeassistant/components/peblar/sensor.py b/homeassistant/components/peblar/sensor.py index 285a8dd5ea0..233417051cb 100644 --- a/homeassistant/components/peblar/sensor.py +++ b/homeassistant/components/peblar/sensor.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from datetime import datetime, timedelta from peblar import PeblarUserConfiguration @@ -24,8 +25,13 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity +from homeassistant.util.dt import utcnow -from .const import DOMAIN +from .const import ( + DOMAIN, + PEBLAR_CHARGE_LIMITER_TO_HOME_ASSISTANT, + PEBLAR_CP_STATE_TO_HOME_ASSISTANT, +) from .coordinator import PeblarConfigEntry, PeblarData, PeblarDataUpdateCoordinator @@ -34,21 +40,37 @@ class PeblarSensorDescription(SensorEntityDescription): """Describe a Peblar sensor.""" has_fn: Callable[[PeblarUserConfiguration], bool] = lambda _: True - value_fn: Callable[[PeblarData], int | None] + value_fn: Callable[[PeblarData], datetime | int | str | None] DESCRIPTIONS: tuple[PeblarSensorDescription, ...] 
= ( PeblarSensorDescription( - key="current", + key="cp_state", + translation_key="cp_state", + device_class=SensorDeviceClass.ENUM, + options=list(PEBLAR_CP_STATE_TO_HOME_ASSISTANT.values()), + value_fn=lambda x: PEBLAR_CP_STATE_TO_HOME_ASSISTANT[x.ev.cp_state], + ), + PeblarSensorDescription( + key="charge_current_limit_source", + translation_key="charge_current_limit_source", + device_class=SensorDeviceClass.ENUM, + entity_category=EntityCategory.DIAGNOSTIC, + options=list(PEBLAR_CHARGE_LIMITER_TO_HOME_ASSISTANT.values()), + value_fn=lambda x: PEBLAR_CHARGE_LIMITER_TO_HOME_ASSISTANT[ + x.ev.charge_current_limit_source + ], + ), + PeblarSensorDescription( + key="current_total", device_class=SensorDeviceClass.CURRENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, - has_fn=lambda x: x.connected_phases == 1, native_unit_of_measurement=UnitOfElectricCurrent.MILLIAMPERE, state_class=SensorStateClass.MEASUREMENT, suggested_display_precision=1, suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - value_fn=lambda x: x.meter.current_phase_1, + value_fn=lambda x: x.meter.current_total, ), PeblarSensorDescription( key="current_phase_1", @@ -193,6 +215,16 @@ DESCRIPTIONS: tuple[PeblarSensorDescription, ...] 
= ( state_class=SensorStateClass.MEASUREMENT, value_fn=lambda x: x.meter.voltage_phase_3, ), + PeblarSensorDescription( + key="uptime", + translation_key="uptime", + device_class=SensorDeviceClass.TIMESTAMP, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + value_fn=lambda x: ( + utcnow().replace(microsecond=0) - timedelta(seconds=x.system.uptime) + ), + ), ) @@ -232,6 +264,6 @@ class PeblarSensorEntity(CoordinatorEntity[PeblarDataUpdateCoordinator], SensorE ) @property - def native_value(self) -> int | None: + def native_value(self) -> datetime | int | str | None: """Return the state of the sensor.""" return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index 0632fa31dd0..01022a19c38 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -1,8 +1,16 @@ { "config": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "no_serial_number": "The discovered Peblar device did not provide a serial number." 
+ }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, "step": { "user": { - "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need to get the IP address of your Peblar charger and the password you use to log into the Peblar device' web interface.\n\nHome Assistant will automatically configure your Peblar charger for use with Home Assistant.", "data": { "host": "[%key:common::config_flow::data::host%]", "password": "[%key:common::config_flow::data::password%]" @@ -10,26 +18,18 @@ "data_description": { "host": "The hostname or IP address of your Peblar charger on your home network.", "password": "The same password as you use to log in to the Peblar device' local web interface." - } + }, + "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need to get the IP address of your Peblar charger and the password you use to log into the Peblar device' web interface.\n\nHome Assistant will automatically configure your Peblar charger for use with Home Assistant." 
}, "zeroconf_confirm": { - "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need the password you use to log into the Peblar device' web interface.\n\nHome Assistant will automatically configure your Peblar charger for use with Home Assistant.", "data": { "password": "[%key:common::config_flow::data::password%]" }, "data_description": { "password": "[%key:component::peblar::config::step::user::data_description::password%]" - } + }, + "description": "Set up your Peblar EV charger to integrate with Home Assistant.\n\nTo do so, you will need the password you use to log into the Peblar device' web interface.\n\nHome Assistant will automatically configure your Peblar charger for use with Home Assistant." } - }, - "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "unknown": "[%key:common::config_flow::error::unknown%]" - }, - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "no_serial_number": "The discovered Peblar device did not provide a serial number." 
} }, "entity": { @@ -59,6 +59,38 @@ } }, "sensor": { + "charge_current_limit_source": { + "name": "Limit source", + "state": { + "charging_cable": "Charging cable", + "current_limiter": "Current limiter", + "dynamic_load_balancing": "Dynamic load balancing", + "external_power_limit": "External power limit", + "group_load_balancing": "Group load balancing", + "hardware_limitation": "Hardware limitation", + "high_temperature": "High temperature", + "household_power_limit": "Household power limit", + "installation_limit": "Installation limit", + "local_modbus_api": "Modbus API", + "local_rest_api": "REST API", + "ocpp_smart_charging": "OCPP smart charging", + "overcurrent_protection": "Overcurrent protection", + "phase_imbalance": "Phase imbalance", + "power_factor": "Power factor", + "solar_charging": "Solar charging" + } + }, + "cp_state": { + "name": "State", + "state": { + "charging": "Charging", + "error": "Error", + "fault": "Fault", + "invalid": "Invalid", + "no_ev_connected": "No EV connected", + "suspended": "Suspended" + } + }, "current_phase_1": { "name": "Current phase 1" }, @@ -83,6 +115,9 @@ "power_phase_3": { "name": "Power phase 3" }, + "uptime": { + "name": "Uptime" + }, "voltage_phase_1": { "name": "Voltage phase 1" }, diff --git a/tests/components/peblar/snapshots/test_sensor.ambr b/tests/components/peblar/snapshots/test_sensor.ambr index c3020b60078..da17a4661ee 100644 --- a/tests/components/peblar/snapshots/test_sensor.ambr +++ b/tests/components/peblar/snapshots/test_sensor.ambr @@ -1,4 +1,61 @@ # serializer version: 1 +# name: test_entities[sensor][sensor.peblar_ev_charger_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_current', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '23-45-A4O-MOF_current_total', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Peblar EV Charger Current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14.242', + }) +# --- # name: test_entities[sensor][sensor.peblar_ev_charger_current_phase_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -227,6 +284,92 @@ 'state': '880.703', }) # --- +# name: test_entities[sensor][sensor.peblar_ev_charger_limit_source-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'charging_cable', + 'current_limiter', + 'dynamic_load_balancing', + 'external_power_limit', + 'group_load_balancing', + 'hardware_limitation', + 'high_temperature', + 'household_power_limit', + 'installation_limit', + 'local_modbus_api', + 'local_rest_api', + 'local_scheduled', + 'ocpp_smart_charging', + 'overcurrent_protection', + 'phase_imbalance', + 'power_factor', + 'solar_charging', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_limit_source', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Limit source', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_current_limit_source', + 'unique_id': '23-45-A4O-MOF_charge_current_limit_source', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_limit_source-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Peblar EV Charger Limit source', + 'options': list([ + 'charging_cable', + 'current_limiter', + 'dynamic_load_balancing', + 'external_power_limit', + 'group_load_balancing', + 'hardware_limitation', + 'high_temperature', + 'household_power_limit', + 'installation_limit', + 'local_modbus_api', + 'local_rest_api', + 'local_scheduled', + 'ocpp_smart_charging', + 'overcurrent_protection', + 'phase_imbalance', + 'power_factor', + 'solar_charging', + ]), + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_limit_source', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'current_limiter', + }) +# --- # name: test_entities[sensor][sensor.peblar_ev_charger_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -488,6 +631,119 @@ 'state': '0.381', }) # --- +# name: test_entities[sensor][sensor.peblar_ev_charger_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'suspended', + 'charging', + 'charging', + 'error', + 'fault', + 'invalid', + 'no_ev_connected', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.peblar_ev_charger_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 
'original_name': 'State', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cp_state', + 'unique_id': '23-45-A4O-MOF_cp_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Peblar EV Charger State', + 'options': list([ + 'suspended', + 'charging', + 'charging', + 'error', + 'fault', + 'invalid', + 'no_ev_connected', + ]), + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'charging', + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.peblar_ev_charger_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'uptime', + 'unique_id': '23-45-A4O-MOF_uptime', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[sensor][sensor.peblar_ev_charger_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Peblar EV Charger Uptime', + }), + 'context': , + 'entity_id': 'sensor.peblar_ev_charger_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-12-18T04:16:46+00:00', + }) +# --- # name: test_entities[sensor][sensor.peblar_ev_charger_voltage_phase_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git 
a/tests/components/peblar/test_sensor.py b/tests/components/peblar/test_sensor.py index 97402206d33..bad81486838 100644 --- a/tests/components/peblar/test_sensor.py +++ b/tests/components/peblar/test_sensor.py @@ -11,6 +11,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import MockConfigEntry, snapshot_platform +@pytest.mark.freeze_time("2024-12-21 21:45:00") @pytest.mark.parametrize("init_integration", [Platform.SENSOR], indirect=True) @pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration") async def test_entities( From 9fcf8f22d2cd2b88fa4ce34382e999565eb00f61 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 21 Dec 2024 23:00:29 +0100 Subject: [PATCH 598/677] Add reauthentication support to Peblar Rocksolid EV Chargers integration (#133757) --- homeassistant/components/peblar/__init__.py | 4 +- .../components/peblar/config_flow.py | 51 +++++++++++++ .../components/peblar/quality_scale.yaml | 2 +- homeassistant/components/peblar/strings.json | 12 +++- tests/components/peblar/test_config_flow.py | 72 +++++++++++++++++++ tests/components/peblar/test_init.py | 13 +++- 6 files changed, 149 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/peblar/__init__.py b/homeassistant/components/peblar/__init__.py index 43c48e28bd0..a055a1a02c8 100644 --- a/homeassistant/components/peblar/__init__.py +++ b/homeassistant/components/peblar/__init__.py @@ -15,7 +15,7 @@ from peblar import ( from homeassistant.const import CONF_HOST, CONF_PASSWORD, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_create_clientsession @@ -53,7 +53,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> 
bo except PeblarConnectionError as err: raise ConfigEntryNotReady("Could not connect to Peblar charger") from err except PeblarAuthenticationError as err: - raise ConfigEntryError("Could not login to Peblar charger") from err + raise ConfigEntryAuthFailed from err except PeblarError as err: raise ConfigEntryNotReady( "Unknown error occurred while connecting to Peblar charger" diff --git a/homeassistant/components/peblar/config_flow.py b/homeassistant/components/peblar/config_flow.py index a9cfb7d89b9..809cb13746e 100644 --- a/homeassistant/components/peblar/config_flow.py +++ b/homeassistant/components/peblar/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Mapping from typing import Any from aiohttp import CookieJar @@ -129,3 +130,53 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN): ), errors=errors, ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle initiation of re-authentication with a Peblar device.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle re-authentication with a Peblar device.""" + errors = {} + + if user_input is not None: + reauth_entry = self._get_reauth_entry() + peblar = Peblar( + host=reauth_entry.data[CONF_HOST], + session=async_create_clientsession( + self.hass, cookie_jar=CookieJar(unsafe=True) + ), + ) + try: + await peblar.login(password=user_input[CONF_PASSWORD]) + except PeblarAuthenticationError: + errors[CONF_PASSWORD] = "invalid_auth" + except PeblarConnectionError: + errors["base"] = "cannot_connect" + except Exception: # noqa: BLE001 + LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + return self.async_update_reload_and_abort( + reauth_entry, + data={ + CONF_HOST: reauth_entry.data[CONF_HOST], + CONF_PASSWORD: user_input[CONF_PASSWORD], + }, + ) + + return 
self.async_show_form( + step_id="reauth_confirm", + data_schema=vol.Schema( + { + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig(type=TextSelectorType.PASSWORD) + ), + } + ), + errors=errors, + ) diff --git a/homeassistant/components/peblar/quality_scale.yaml b/homeassistant/components/peblar/quality_scale.yaml index 3dc470ce76b..2b0684793a8 100644 --- a/homeassistant/components/peblar/quality_scale.yaml +++ b/homeassistant/components/peblar/quality_scale.yaml @@ -36,7 +36,7 @@ rules: integration-owner: done log-when-unavailable: done parallel-updates: todo - reauthentication-flow: todo + reauthentication-flow: done test-coverage: todo # Gold devices: todo diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index 01022a19c38..0cce7ed8191 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -2,7 +2,8 @@ "config": { "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "no_serial_number": "The discovered Peblar device did not provide a serial number." + "no_serial_number": "The discovered Peblar device did not provide a serial number.", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", @@ -10,6 +11,15 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "step": { + "reauth_confirm": { + "data": { + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "[%key:component::peblar::config::step::user::data_description::password%]" + }, + "description": "Reauthenticate with your Peblar RV charger.\n\nTo do so, you will need to enter your new password you use to log into Peblar's device web interface." 
+ }, "user": { "data": { "host": "[%key:common::config_flow::data::host%]", diff --git a/tests/components/peblar/test_config_flow.py b/tests/components/peblar/test_config_flow.py index 4e3ab008047..a4a461b6bba 100644 --- a/tests/components/peblar/test_config_flow.py +++ b/tests/components/peblar/test_config_flow.py @@ -319,3 +319,75 @@ async def test_user_flow_with_zeroconf_in_progress(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert not hass.config_entries.flow.async_progress() + + +@pytest.mark.usefixtures("mock_peblar") +async def test_reauth_flow( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the reauthentication configuration flow.""" + mock_config_entry.add_to_hass(hass) + assert mock_config_entry.data[CONF_PASSWORD] == "OMGSPIDERS" + + result = await mock_config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "OMGPUPPIES"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + assert mock_config_entry.data == { + CONF_HOST: "127.0.0.127", + CONF_PASSWORD: "OMGPUPPIES", + } + + +@pytest.mark.parametrize( + ("side_effect", "expected_error"), + [ + (PeblarConnectionError, {"base": "cannot_connect"}), + (PeblarAuthenticationError, {CONF_PASSWORD: "invalid_auth"}), + (Exception, {"base": "unknown"}), + ], +) +async def test_reauth_flow_errors( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_peblar: MagicMock, + side_effect: Exception, + expected_error: dict[str, str], +) -> None: + """Test we show form on a error.""" + mock_config_entry.add_to_hass(hass) + mock_peblar.login.side_effect = side_effect + + result = await mock_config_entry.start_reauth_flow(hass) + + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PASSWORD: "OMGPUPPIES", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == expected_error + + mock_peblar.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PASSWORD: "OMGPUPPIES", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" diff --git a/tests/components/peblar/test_init.py b/tests/components/peblar/test_init.py index ca7b0d88c24..6e6a9c2af05 100644 --- a/tests/components/peblar/test_init.py +++ b/tests/components/peblar/test_init.py @@ -7,7 +7,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.peblar.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -70,6 +70,17 @@ async def test_config_entry_authentication_failed( assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow["step_id"] == "reauth_confirm" + assert flow["handler"] == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == mock_config_entry.entry_id + @pytest.mark.usefixtures("init_integration") async def test_peblar_device_entry( From 662dea28eddb9b7b28dd328f4c40398224e780ad Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 21 Dec 2024 17:25:48 -1000 Subject: [PATCH 599/677] Replace queries using distinct with correlated scalar subqueries to significantly improve purge performance (#133748) Replace queries using distinct with correlated scalar subqueries like #133553 and #133699 PostgreSQL does not support skip/loose index scan https://wiki.postgresql.org/wiki/Loose_indexscan This makes the `distinct` query (see section `Selecting Distinct Values` in the wiki above) to find the unused ids very expense. We can replace them with correlated scalar subqueries as done in #133553 to avoid the `distinct` --- homeassistant/components/recorder/queries.py | 73 ++++++++++++++++---- 1 file changed, 58 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/recorder/queries.py b/homeassistant/components/recorder/queries.py index 7ac4c19bc94..71e50cf13d6 100644 --- a/homeassistant/components/recorder/queries.py +++ b/homeassistant/components/recorder/queries.py @@ -5,7 +5,16 @@ from __future__ import annotations from collections.abc import Iterable from datetime import datetime -from sqlalchemy import delete, distinct, func, lambda_stmt, select, union_all, update +from sqlalchemy import ( + and_, + delete, + distinct, + func, + lambda_stmt, + select, + union_all, + update, +) from sqlalchemy.sql.lambdas import StatementLambdaElement from sqlalchemy.sql.selectable import Select @@ -838,16 +847,33 @@ def get_migration_changes() -> StatementLambdaElement: def find_event_types_to_purge() -> StatementLambdaElement: - """Find event_type_ids to purge.""" + """Find event_type_ids to purge. + + PostgreSQL does not support skip/loose index scan + https://wiki.postgresql.org/wiki/Loose_indexscan + + To avoid using distinct, we use a subquery to get the latest time_fired_ts + for each event_type. This is then used to filter out the event_type_ids + that no longer exist in the Events table. + + This query is fast for SQLite, MariaDB, MySQL, and PostgreSQL. 
+ """ return lambda_stmt( lambda: select(EventTypes.event_type_id, EventTypes.event_type).where( EventTypes.event_type_id.not_in( - select(EventTypes.event_type_id).join( - used_event_type_ids := select( - distinct(Events.event_type_id).label("used_event_type_id") - ).subquery(), - EventTypes.event_type_id - == used_event_type_ids.c.used_event_type_id, + select(EventTypes.event_type_id) + .select_from(EventTypes) + .join( + Events, + and_( + EventTypes.event_type_id == Events.event_type_id, + Events.time_fired_ts + == select(Events.time_fired_ts) + .where(Events.event_type_id == EventTypes.event_type_id) + .limit(1) + .scalar_subquery() + .correlate(EventTypes), + ), ) ) ) @@ -855,16 +881,33 @@ def find_event_types_to_purge() -> StatementLambdaElement: def find_entity_ids_to_purge() -> StatementLambdaElement: - """Find entity_ids to purge.""" + """Find metadata_ids for each entity_id to purge. + + PostgreSQL does not support skip/loose index scan + https://wiki.postgresql.org/wiki/Loose_indexscan + + To avoid using distinct, we use a subquery to get the latest last_updated_ts + for each entity_id. This is then used to filter out the metadata_ids + that no longer exist in the States table. + + This query is fast for SQLite, MariaDB, MySQL, and PostgreSQL. 
+ """ return lambda_stmt( lambda: select(StatesMeta.metadata_id, StatesMeta.entity_id).where( StatesMeta.metadata_id.not_in( - select(StatesMeta.metadata_id).join( - used_states_metadata_id := select( - distinct(States.metadata_id).label("used_states_metadata_id") - ).subquery(), - StatesMeta.metadata_id - == used_states_metadata_id.c.used_states_metadata_id, + select(StatesMeta.metadata_id) + .select_from(StatesMeta) + .join( + States, + and_( + StatesMeta.metadata_id == States.metadata_id, + States.last_updated_ts + == select(States.last_updated_ts) + .where(States.metadata_id == StatesMeta.metadata_id) + .limit(1) + .scalar_subquery() + .correlate(StatesMeta), + ), ) ) ) From c2a9b0ff527aa69ef55a26f05c3a0abef5f1041a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 21 Dec 2024 19:38:11 -1000 Subject: [PATCH 600/677] Reduce complexity to find unused data_ids and attributes_ids for db engines with slow range select (#133752) --- homeassistant/components/recorder/purge.py | 85 +--- homeassistant/components/recorder/queries.py | 475 ++----------------- 2 files changed, 61 insertions(+), 499 deletions(-) diff --git a/homeassistant/components/recorder/purge.py b/homeassistant/components/recorder/purge.py index 881952c390d..ea2b93efba7 100644 --- a/homeassistant/components/recorder/purge.py +++ b/homeassistant/components/recorder/purge.py @@ -4,7 +4,6 @@ from __future__ import annotations from collections.abc import Callable from datetime import datetime -from itertools import zip_longest import logging import time from typing import TYPE_CHECKING @@ -297,68 +296,18 @@ def _select_unused_attributes_ids( seen_ids: set[int] = set() if not database_engine.optimizer.slow_range_in_select: - # + query = attributes_ids_exist_in_states_with_fast_in_distinct # SQLite has a superior query optimizer for the distinct query below as it uses # the covering index without having to examine the rows directly for both of the # queries below. 
- # - # We use the distinct query for SQLite since the query in the other branch can - # generate more than 500 unions which SQLite does not support. - # - # How MariaDB's query optimizer handles this query: - # > explain select distinct attributes_id from states where attributes_id in - # (136723); - # ...Using index - # - for attributes_ids_chunk in chunked_or_all( - attributes_ids, instance.max_bind_vars - ): - seen_ids.update( - state[0] - for state in session.execute( - attributes_ids_exist_in_states_with_fast_in_distinct( - attributes_ids_chunk - ) - ).all() - ) else: - # + query = attributes_ids_exist_in_states # This branch is for DBMS that cannot optimize the distinct query well and has # to examine all the rows that match. - # - # This branch uses a union of simple queries, as each query is optimized away - # as the answer to the query can be found in the index. - # - # The below query works for SQLite as long as there are no more than 500 - # attributes_id to be selected. We currently do not have MySQL or PostgreSQL - # servers running in the test suite; we test this path using SQLite when there - # are less than 500 attributes_id. - # - # How MariaDB's query optimizer handles this query: - # > explain select min(attributes_id) from states where attributes_id = 136723; - # ...Select tables optimized away - # - # We used to generate a query based on how many attribute_ids to find but - # that meant sqlalchemy Transparent SQL Compilation Caching was working against - # us by cached up to max_bind_vars different statements which could be - # up to 500MB for large database due to the complexity of the ORM objects. - # - # We now break the query into groups of 100 and use a lambda_stmt to ensure - # that the query is only cached once. - # - # PostgreSQL also suffers from the same issue as older MariaDB with the distinct query - # when the database gets large because it doesn't support skip/loose index scan. 
- # https://wiki.postgresql.org/wiki/Loose_indexscan - # https://github.com/home-assistant/core/issues/126084 - groups = [iter(attributes_ids)] * 100 - for attr_ids in zip_longest(*groups, fillvalue=None): - seen_ids |= { - attrs_id[0] - for attrs_id in session.execute( - attributes_ids_exist_in_states(*attr_ids) # type: ignore[arg-type] - ).all() - if attrs_id[0] is not None - } + for attributes_ids_chunk in chunked_or_all(attributes_ids, instance.max_bind_vars): + seen_ids.update( + state[0] for state in session.execute(query(attributes_ids_chunk)).all() + ) to_remove = attributes_ids - seen_ids _LOGGER.debug( "Selected %s shared attributes to remove", @@ -395,23 +344,13 @@ def _select_unused_event_data_ids( # See _select_unused_attributes_ids for why this function # branches for non-sqlite databases. if not database_engine.optimizer.slow_range_in_select: - for data_ids_chunk in chunked_or_all(data_ids, instance.max_bind_vars): - seen_ids.update( - state[0] - for state in session.execute( - data_ids_exist_in_events_with_fast_in_distinct(data_ids_chunk) - ).all() - ) + query = data_ids_exist_in_events_with_fast_in_distinct else: - groups = [iter(data_ids)] * 100 - for data_ids_group in zip_longest(*groups, fillvalue=None): - seen_ids |= { - data_id[0] - for data_id in session.execute( - data_ids_exist_in_events(*data_ids_group) # type: ignore[arg-type] - ).all() - if data_id[0] is not None - } + query = data_ids_exist_in_events + for data_ids_chunk in chunked_or_all(data_ids, instance.max_bind_vars): + seen_ids.update( + state[0] for state in session.execute(query(data_ids_chunk)).all() + ) to_remove = data_ids - seen_ids _LOGGER.debug("Selected %s shared event data to remove", len(to_remove)) return to_remove diff --git a/homeassistant/components/recorder/queries.py b/homeassistant/components/recorder/queries.py index 71e50cf13d6..eb681f86702 100644 --- a/homeassistant/components/recorder/queries.py +++ b/homeassistant/components/recorder/queries.py @@ -5,16 +5,7 
@@ from __future__ import annotations from collections.abc import Iterable from datetime import datetime -from sqlalchemy import ( - and_, - delete, - distinct, - func, - lambda_stmt, - select, - union_all, - update, -) +from sqlalchemy import and_, delete, distinct, func, lambda_stmt, select, update from sqlalchemy.sql.lambdas import StatementLambdaElement from sqlalchemy.sql.selectable import Select @@ -85,11 +76,6 @@ def find_states_metadata_ids(entity_ids: Iterable[str]) -> StatementLambdaElemen ) -def _state_attrs_exist(attr: int | None) -> Select: - """Check if a state attributes id exists in the states table.""" - return select(States.attributes_id).where(States.attributes_id == attr).limit(1) - - def attributes_ids_exist_in_states_with_fast_in_distinct( attributes_ids: Iterable[int], ) -> StatementLambdaElement: @@ -102,214 +88,35 @@ def attributes_ids_exist_in_states_with_fast_in_distinct( def attributes_ids_exist_in_states( - attr1: int, - attr2: int | None, - attr3: int | None, - attr4: int | None, - attr5: int | None, - attr6: int | None, - attr7: int | None, - attr8: int | None, - attr9: int | None, - attr10: int | None, - attr11: int | None, - attr12: int | None, - attr13: int | None, - attr14: int | None, - attr15: int | None, - attr16: int | None, - attr17: int | None, - attr18: int | None, - attr19: int | None, - attr20: int | None, - attr21: int | None, - attr22: int | None, - attr23: int | None, - attr24: int | None, - attr25: int | None, - attr26: int | None, - attr27: int | None, - attr28: int | None, - attr29: int | None, - attr30: int | None, - attr31: int | None, - attr32: int | None, - attr33: int | None, - attr34: int | None, - attr35: int | None, - attr36: int | None, - attr37: int | None, - attr38: int | None, - attr39: int | None, - attr40: int | None, - attr41: int | None, - attr42: int | None, - attr43: int | None, - attr44: int | None, - attr45: int | None, - attr46: int | None, - attr47: int | None, - attr48: int | None, - attr49: 
int | None, - attr50: int | None, - attr51: int | None, - attr52: int | None, - attr53: int | None, - attr54: int | None, - attr55: int | None, - attr56: int | None, - attr57: int | None, - attr58: int | None, - attr59: int | None, - attr60: int | None, - attr61: int | None, - attr62: int | None, - attr63: int | None, - attr64: int | None, - attr65: int | None, - attr66: int | None, - attr67: int | None, - attr68: int | None, - attr69: int | None, - attr70: int | None, - attr71: int | None, - attr72: int | None, - attr73: int | None, - attr74: int | None, - attr75: int | None, - attr76: int | None, - attr77: int | None, - attr78: int | None, - attr79: int | None, - attr80: int | None, - attr81: int | None, - attr82: int | None, - attr83: int | None, - attr84: int | None, - attr85: int | None, - attr86: int | None, - attr87: int | None, - attr88: int | None, - attr89: int | None, - attr90: int | None, - attr91: int | None, - attr92: int | None, - attr93: int | None, - attr94: int | None, - attr95: int | None, - attr96: int | None, - attr97: int | None, - attr98: int | None, - attr99: int | None, - attr100: int | None, + attributes_ids: Iterable[int], ) -> StatementLambdaElement: - """Generate the find attributes select only once. + """Find attributes ids that exist in the states table. - https://docs.sqlalchemy.org/en/14/core/connections.html#quick-guidelines-for-lambdas + PostgreSQL does not support skip/loose index scan + https://wiki.postgresql.org/wiki/Loose_indexscan + + To avoid using distinct, we use a subquery to get the latest last_updated_ts + for each attributes_id. This is then used to filter out the attributes_id + that no longer exist in the States table. + + This query is fast for older MariaDB, older MySQL, and PostgreSQL. 
""" return lambda_stmt( - lambda: union_all( - _state_attrs_exist(attr1), - _state_attrs_exist(attr2), - _state_attrs_exist(attr3), - _state_attrs_exist(attr4), - _state_attrs_exist(attr5), - _state_attrs_exist(attr6), - _state_attrs_exist(attr7), - _state_attrs_exist(attr8), - _state_attrs_exist(attr9), - _state_attrs_exist(attr10), - _state_attrs_exist(attr11), - _state_attrs_exist(attr12), - _state_attrs_exist(attr13), - _state_attrs_exist(attr14), - _state_attrs_exist(attr15), - _state_attrs_exist(attr16), - _state_attrs_exist(attr17), - _state_attrs_exist(attr18), - _state_attrs_exist(attr19), - _state_attrs_exist(attr20), - _state_attrs_exist(attr21), - _state_attrs_exist(attr22), - _state_attrs_exist(attr23), - _state_attrs_exist(attr24), - _state_attrs_exist(attr25), - _state_attrs_exist(attr26), - _state_attrs_exist(attr27), - _state_attrs_exist(attr28), - _state_attrs_exist(attr29), - _state_attrs_exist(attr30), - _state_attrs_exist(attr31), - _state_attrs_exist(attr32), - _state_attrs_exist(attr33), - _state_attrs_exist(attr34), - _state_attrs_exist(attr35), - _state_attrs_exist(attr36), - _state_attrs_exist(attr37), - _state_attrs_exist(attr38), - _state_attrs_exist(attr39), - _state_attrs_exist(attr40), - _state_attrs_exist(attr41), - _state_attrs_exist(attr42), - _state_attrs_exist(attr43), - _state_attrs_exist(attr44), - _state_attrs_exist(attr45), - _state_attrs_exist(attr46), - _state_attrs_exist(attr47), - _state_attrs_exist(attr48), - _state_attrs_exist(attr49), - _state_attrs_exist(attr50), - _state_attrs_exist(attr51), - _state_attrs_exist(attr52), - _state_attrs_exist(attr53), - _state_attrs_exist(attr54), - _state_attrs_exist(attr55), - _state_attrs_exist(attr56), - _state_attrs_exist(attr57), - _state_attrs_exist(attr58), - _state_attrs_exist(attr59), - _state_attrs_exist(attr60), - _state_attrs_exist(attr61), - _state_attrs_exist(attr62), - _state_attrs_exist(attr63), - _state_attrs_exist(attr64), - _state_attrs_exist(attr65), - 
_state_attrs_exist(attr66), - _state_attrs_exist(attr67), - _state_attrs_exist(attr68), - _state_attrs_exist(attr69), - _state_attrs_exist(attr70), - _state_attrs_exist(attr71), - _state_attrs_exist(attr72), - _state_attrs_exist(attr73), - _state_attrs_exist(attr74), - _state_attrs_exist(attr75), - _state_attrs_exist(attr76), - _state_attrs_exist(attr77), - _state_attrs_exist(attr78), - _state_attrs_exist(attr79), - _state_attrs_exist(attr80), - _state_attrs_exist(attr81), - _state_attrs_exist(attr82), - _state_attrs_exist(attr83), - _state_attrs_exist(attr84), - _state_attrs_exist(attr85), - _state_attrs_exist(attr86), - _state_attrs_exist(attr87), - _state_attrs_exist(attr88), - _state_attrs_exist(attr89), - _state_attrs_exist(attr90), - _state_attrs_exist(attr91), - _state_attrs_exist(attr92), - _state_attrs_exist(attr93), - _state_attrs_exist(attr94), - _state_attrs_exist(attr95), - _state_attrs_exist(attr96), - _state_attrs_exist(attr97), - _state_attrs_exist(attr98), - _state_attrs_exist(attr99), - _state_attrs_exist(attr100), + lambda: select(StateAttributes.attributes_id) + .select_from(StateAttributes) + .join( + States, + and_( + States.attributes_id == StateAttributes.attributes_id, + States.last_updated_ts + == select(States.last_updated_ts) + .where(States.attributes_id == StateAttributes.attributes_id) + .limit(1) + .scalar_subquery() + .correlate(StateAttributes), + ), ) + .where(StateAttributes.attributes_id.in_(attributes_ids)) ) @@ -322,220 +129,36 @@ def data_ids_exist_in_events_with_fast_in_distinct( ) -def _event_data_id_exist(data_id: int | None) -> Select: - """Check if a event data id exists in the events table.""" - return select(Events.data_id).where(Events.data_id == data_id).limit(1) - - def data_ids_exist_in_events( - id1: int, - id2: int | None, - id3: int | None, - id4: int | None, - id5: int | None, - id6: int | None, - id7: int | None, - id8: int | None, - id9: int | None, - id10: int | None, - id11: int | None, - id12: int | None, 
- id13: int | None, - id14: int | None, - id15: int | None, - id16: int | None, - id17: int | None, - id18: int | None, - id19: int | None, - id20: int | None, - id21: int | None, - id22: int | None, - id23: int | None, - id24: int | None, - id25: int | None, - id26: int | None, - id27: int | None, - id28: int | None, - id29: int | None, - id30: int | None, - id31: int | None, - id32: int | None, - id33: int | None, - id34: int | None, - id35: int | None, - id36: int | None, - id37: int | None, - id38: int | None, - id39: int | None, - id40: int | None, - id41: int | None, - id42: int | None, - id43: int | None, - id44: int | None, - id45: int | None, - id46: int | None, - id47: int | None, - id48: int | None, - id49: int | None, - id50: int | None, - id51: int | None, - id52: int | None, - id53: int | None, - id54: int | None, - id55: int | None, - id56: int | None, - id57: int | None, - id58: int | None, - id59: int | None, - id60: int | None, - id61: int | None, - id62: int | None, - id63: int | None, - id64: int | None, - id65: int | None, - id66: int | None, - id67: int | None, - id68: int | None, - id69: int | None, - id70: int | None, - id71: int | None, - id72: int | None, - id73: int | None, - id74: int | None, - id75: int | None, - id76: int | None, - id77: int | None, - id78: int | None, - id79: int | None, - id80: int | None, - id81: int | None, - id82: int | None, - id83: int | None, - id84: int | None, - id85: int | None, - id86: int | None, - id87: int | None, - id88: int | None, - id89: int | None, - id90: int | None, - id91: int | None, - id92: int | None, - id93: int | None, - id94: int | None, - id95: int | None, - id96: int | None, - id97: int | None, - id98: int | None, - id99: int | None, - id100: int | None, + data_ids: Iterable[int], ) -> StatementLambdaElement: - """Generate the find event data select only once. + """Find data ids that exist in the events table. 
- https://docs.sqlalchemy.org/en/14/core/connections.html#quick-guidelines-for-lambdas + PostgreSQL does not support skip/loose index scan + https://wiki.postgresql.org/wiki/Loose_indexscan + + To avoid using distinct, we use a subquery to get the latest time_fired_ts + for each data_id. This is then used to filter out the data_id + that no longer exist in the Events table. + + This query is fast for older MariaDB, older MySQL, and PostgreSQL. """ return lambda_stmt( - lambda: union_all( - _event_data_id_exist(id1), - _event_data_id_exist(id2), - _event_data_id_exist(id3), - _event_data_id_exist(id4), - _event_data_id_exist(id5), - _event_data_id_exist(id6), - _event_data_id_exist(id7), - _event_data_id_exist(id8), - _event_data_id_exist(id9), - _event_data_id_exist(id10), - _event_data_id_exist(id11), - _event_data_id_exist(id12), - _event_data_id_exist(id13), - _event_data_id_exist(id14), - _event_data_id_exist(id15), - _event_data_id_exist(id16), - _event_data_id_exist(id17), - _event_data_id_exist(id18), - _event_data_id_exist(id19), - _event_data_id_exist(id20), - _event_data_id_exist(id21), - _event_data_id_exist(id22), - _event_data_id_exist(id23), - _event_data_id_exist(id24), - _event_data_id_exist(id25), - _event_data_id_exist(id26), - _event_data_id_exist(id27), - _event_data_id_exist(id28), - _event_data_id_exist(id29), - _event_data_id_exist(id30), - _event_data_id_exist(id31), - _event_data_id_exist(id32), - _event_data_id_exist(id33), - _event_data_id_exist(id34), - _event_data_id_exist(id35), - _event_data_id_exist(id36), - _event_data_id_exist(id37), - _event_data_id_exist(id38), - _event_data_id_exist(id39), - _event_data_id_exist(id40), - _event_data_id_exist(id41), - _event_data_id_exist(id42), - _event_data_id_exist(id43), - _event_data_id_exist(id44), - _event_data_id_exist(id45), - _event_data_id_exist(id46), - _event_data_id_exist(id47), - _event_data_id_exist(id48), - _event_data_id_exist(id49), - _event_data_id_exist(id50), - 
_event_data_id_exist(id51), - _event_data_id_exist(id52), - _event_data_id_exist(id53), - _event_data_id_exist(id54), - _event_data_id_exist(id55), - _event_data_id_exist(id56), - _event_data_id_exist(id57), - _event_data_id_exist(id58), - _event_data_id_exist(id59), - _event_data_id_exist(id60), - _event_data_id_exist(id61), - _event_data_id_exist(id62), - _event_data_id_exist(id63), - _event_data_id_exist(id64), - _event_data_id_exist(id65), - _event_data_id_exist(id66), - _event_data_id_exist(id67), - _event_data_id_exist(id68), - _event_data_id_exist(id69), - _event_data_id_exist(id70), - _event_data_id_exist(id71), - _event_data_id_exist(id72), - _event_data_id_exist(id73), - _event_data_id_exist(id74), - _event_data_id_exist(id75), - _event_data_id_exist(id76), - _event_data_id_exist(id77), - _event_data_id_exist(id78), - _event_data_id_exist(id79), - _event_data_id_exist(id80), - _event_data_id_exist(id81), - _event_data_id_exist(id82), - _event_data_id_exist(id83), - _event_data_id_exist(id84), - _event_data_id_exist(id85), - _event_data_id_exist(id86), - _event_data_id_exist(id87), - _event_data_id_exist(id88), - _event_data_id_exist(id89), - _event_data_id_exist(id90), - _event_data_id_exist(id91), - _event_data_id_exist(id92), - _event_data_id_exist(id93), - _event_data_id_exist(id94), - _event_data_id_exist(id95), - _event_data_id_exist(id96), - _event_data_id_exist(id97), - _event_data_id_exist(id98), - _event_data_id_exist(id99), - _event_data_id_exist(id100), + lambda: select(EventData.data_id) + .select_from(EventData) + .join( + Events, + and_( + Events.data_id == EventData.data_id, + Events.time_fired_ts + == select(Events.time_fired_ts) + .where(Events.data_id == EventData.data_id) + .limit(1) + .scalar_subquery() + .correlate(EventData), + ), ) + .where(EventData.data_id.in_(data_ids)) ) From d322398d066cca19ebbf7dc3e3b6e94aef015b25 Mon Sep 17 00:00:00 2001 From: Austin Mroczek Date: Sat, 21 Dec 2024 23:59:54 -0800 Subject: [PATCH 601/677] 
TotalConnect use entry.runtime_data (#133756) * use entry.runtime_data * type the entry * update quality scale * recommended fixes * Update homeassistant/components/totalconnect/alarm_control_panel.py * Update homeassistant/components/totalconnect/binary_sensor.py * Update homeassistant/components/totalconnect/button.py --------- Co-authored-by: Joost Lekkerkerker --- .../components/totalconnect/__init__.py | 25 ++++++++++--------- .../totalconnect/alarm_control_panel.py | 2 +- .../components/totalconnect/binary_sensor.py | 3 +-- .../components/totalconnect/button.py | 3 +-- .../components/totalconnect/diagnostics.py | 4 +-- .../totalconnect/quality_scale.yaml | 2 +- 6 files changed, 18 insertions(+), 21 deletions(-) diff --git a/homeassistant/components/totalconnect/__init__.py b/homeassistant/components/totalconnect/__init__.py index 0d8b915770a..9f291ea15a6 100644 --- a/homeassistant/components/totalconnect/__init__.py +++ b/homeassistant/components/totalconnect/__init__.py @@ -8,13 +8,17 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed -from .const import AUTO_BYPASS, CONF_USERCODES, DOMAIN +from .const import AUTO_BYPASS, CONF_USERCODES from .coordinator import TotalConnectDataUpdateCoordinator PLATFORMS = [Platform.ALARM_CONTROL_PANEL, Platform.BINARY_SENSOR, Platform.BUTTON] +type TotalConnectConfigEntry = ConfigEntry[TotalConnectDataUpdateCoordinator] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +async def async_setup_entry( + hass: HomeAssistant, entry: TotalConnectConfigEntry +) -> bool: """Set up upon config entry in user interface.""" conf = entry.data username = conf[CONF_USERNAME] @@ -40,8 +44,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = TotalConnectDataUpdateCoordinator(hass, client) await coordinator.async_config_entry_first_refresh() - 
hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) entry.async_on_unload(entry.add_update_listener(update_listener)) @@ -49,18 +52,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: TotalConnectConfigEntry +) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) -async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def update_listener(hass: HomeAssistant, entry: TotalConnectConfigEntry) -> None: """Update listener.""" bypass = entry.options.get(AUTO_BYPASS, False) - client = hass.data[DOMAIN][entry.entry_id].client + client = entry.runtime_data.client for location_id in client.locations: client.locations[location_id].auto_bypass_low_battery = bypass diff --git a/homeassistant/components/totalconnect/alarm_control_panel.py b/homeassistant/components/totalconnect/alarm_control_panel.py index bc33129a741..48ba78acc92 100644 --- a/homeassistant/components/totalconnect/alarm_control_panel.py +++ b/homeassistant/components/totalconnect/alarm_control_panel.py @@ -30,7 +30,7 @@ async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up TotalConnect alarm panels based on a config entry.""" - coordinator: TotalConnectDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data code_required = entry.options.get(CODE_REQUIRED, False) async_add_entities( diff --git 
a/homeassistant/components/totalconnect/binary_sensor.py b/homeassistant/components/totalconnect/binary_sensor.py index 3126efff88a..9a3c2558999 100644 --- a/homeassistant/components/totalconnect/binary_sensor.py +++ b/homeassistant/components/totalconnect/binary_sensor.py @@ -17,7 +17,6 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN from .coordinator import TotalConnectDataUpdateCoordinator from .entity import TotalConnectLocationEntity, TotalConnectZoneEntity @@ -125,7 +124,7 @@ async def async_setup_entry( """Set up TotalConnect device sensors based on a config entry.""" sensors: list = [] - coordinator: TotalConnectDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data client_locations = coordinator.client.locations diff --git a/homeassistant/components/totalconnect/button.py b/homeassistant/components/totalconnect/button.py index fc5b5e89587..e228f03ec6b 100644 --- a/homeassistant/components/totalconnect/button.py +++ b/homeassistant/components/totalconnect/button.py @@ -12,7 +12,6 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN from .coordinator import TotalConnectDataUpdateCoordinator from .entity import TotalConnectLocationEntity, TotalConnectZoneEntity @@ -43,7 +42,7 @@ async def async_setup_entry( ) -> None: """Set up TotalConnect buttons based on a config entry.""" buttons: list = [] - coordinator: TotalConnectDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data for location_id, location in coordinator.client.locations.items(): buttons.extend( diff --git a/homeassistant/components/totalconnect/diagnostics.py b/homeassistant/components/totalconnect/diagnostics.py index b590c54e2ba..85f52ccc670 
100644 --- a/homeassistant/components/totalconnect/diagnostics.py +++ b/homeassistant/components/totalconnect/diagnostics.py @@ -8,8 +8,6 @@ from homeassistant.components.diagnostics import async_redact_data from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import DOMAIN - TO_REDACT = [ "username", "Password", @@ -27,7 +25,7 @@ async def async_get_config_entry_diagnostics( hass: HomeAssistant, config_entry: ConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - client = hass.data[DOMAIN][config_entry.entry_id].client + client = config_entry.runtime_data.client data: dict[str, Any] = {} data["client"] = { diff --git a/homeassistant/components/totalconnect/quality_scale.yaml b/homeassistant/components/totalconnect/quality_scale.yaml index a8e5b60f7ee..fb0f1e5098a 100644 --- a/homeassistant/components/totalconnect/quality_scale.yaml +++ b/homeassistant/components/totalconnect/quality_scale.yaml @@ -4,7 +4,7 @@ rules: test-before-configure: done unique-config-entry: done config-flow-test-coverage: todo - runtime-data: todo + runtime-data: done test-before-setup: todo appropriate-polling: done entity-unique-id: done From cef182c596c7d77441f3f3fb188659c511621c28 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Sun, 22 Dec 2024 09:02:58 +0100 Subject: [PATCH 602/677] Bump pyOverkiz to 1.15.4 (#133769) Bump pyoverkiz to 1.15.4 --- homeassistant/components/overkiz/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/overkiz/manifest.json b/homeassistant/components/overkiz/manifest.json index 9ab901d5005..84fdc11ae47 100644 --- a/homeassistant/components/overkiz/manifest.json +++ b/homeassistant/components/overkiz/manifest.json @@ -20,7 +20,7 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"], 
- "requirements": ["pyoverkiz==1.15.3"], + "requirements": ["pyoverkiz==1.15.4"], "zeroconf": [ { "type": "_kizbox._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index e7f4aadfe05..56255fc997e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2162,7 +2162,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.15.3 +pyoverkiz==1.15.4 # homeassistant.components.onewire pyownet==0.10.0.post1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 84fc0f11967..d80ad1320f5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1758,7 +1758,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.15.3 +pyoverkiz==1.15.4 # homeassistant.components.onewire pyownet==0.10.0.post1 From 284ccbc778edc92a63bf1cfe63d4321a7451fd58 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Sun, 22 Dec 2024 09:40:06 +0100 Subject: [PATCH 603/677] Add additional Hitachi sensors to Overkiz (#133772) Add additional Hitachi sensors --- homeassistant/components/overkiz/sensor.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/homeassistant/components/overkiz/sensor.py b/homeassistant/components/overkiz/sensor.py index 184b4938fef..8b20d817921 100644 --- a/homeassistant/components/overkiz/sensor.py +++ b/homeassistant/components/overkiz/sensor.py @@ -458,6 +458,24 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [ state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfTemperature.CELSIUS, ), + # HitachiHeatingSystem/HitachiAirToWaterHeatingZone + OverkizSensorDescription( + key=OverkizState.MODBUS_ROOM_AMBIENT_TEMPERATURE_STATUS_ZONE_1, + name="Room ambient temperature", + native_value=lambda value: cast(float, value), + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + ), + # 
HitachiHeatingSystem/HitachiAirToWaterMainComponent + OverkizSensorDescription( + key=OverkizState.MODBUS_OUTDOOR_AMBIENT_TEMPERATURE, + name="Outdoor ambient temperature", + native_value=lambda value: cast(int, value), + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + ), ] SUPPORTED_STATES = {description.key: description for description in SENSOR_DESCRIPTIONS} From 0c24afec6c32b8b6b0eac613425804dd6e302d74 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 22 Dec 2024 10:03:16 +0100 Subject: [PATCH 604/677] Update integration quality scale for Peblar Rocksolid EV Chargers (#133764) --- .../components/peblar/quality_scale.yaml | 23 +++++++++++-------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/peblar/quality_scale.yaml b/homeassistant/components/peblar/quality_scale.yaml index 2b0684793a8..78ec3718caf 100644 --- a/homeassistant/components/peblar/quality_scale.yaml +++ b/homeassistant/components/peblar/quality_scale.yaml @@ -30,8 +30,11 @@ rules: # Silver action-exceptions: todo config-entry-unloading: done - docs-configuration-parameters: todo - docs-installation-parameters: todo + docs-configuration-parameters: + status: exempt + comment: | + This integration does not have any configuration parameters. + docs-installation-parameters: done entity-unavailable: done integration-owner: done log-when-unavailable: done @@ -39,10 +42,10 @@ rules: reauthentication-flow: done test-coverage: todo # Gold - devices: todo + devices: done diagnostics: done - discovery-update-info: todo - discovery: todo + discovery-update-info: done + discovery: done docs-data-update: todo docs-examples: todo docs-known-limitations: todo @@ -54,15 +57,15 @@ rules: status: exempt comment: | This integration connects to a single device. 
- entity-category: todo - entity-device-class: todo - entity-disabled-by-default: todo - entity-translations: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done exception-translations: status: exempt comment: | The coordinator needs translation when the update failed. - icon-translations: todo + icon-translations: done reconfiguration-flow: todo repair-issues: status: exempt From cd6da9d9e88eee5565814fa2f81f9ee2cee1824e Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Sun, 22 Dec 2024 10:07:35 +0100 Subject: [PATCH 605/677] Merge similar tests to parameterized tests for enphase_envoy (#133740) --- .../enphase_envoy/quality_scale.yaml | 2 - .../enphase_envoy/test_config_flow.py | 103 +++++------------- 2 files changed, 29 insertions(+), 76 deletions(-) diff --git a/homeassistant/components/enphase_envoy/quality_scale.yaml b/homeassistant/components/enphase_envoy/quality_scale.yaml index 2b9350ed944..171c07e9474 100644 --- a/homeassistant/components/enphase_envoy/quality_scale.yaml +++ b/homeassistant/components/enphase_envoy/quality_scale.yaml @@ -11,8 +11,6 @@ rules: config-flow-test-coverage: status: todo comment: | - - Let's have every test result in either CREATE_ENTRY or ABORT (like test_form_invalid_auth or test_form_cannot_connect, they can be parametrized) - - test_zeroconf_token_firmware and test_zeroconf_pre_token_firmware can also be parametrized I think - test_zero_conf_malformed_serial_property - with pytest.raises(KeyError) as ex:: I don't believe this should be able to raise a KeyError Shouldn't we abort the flow? 
config-flow: diff --git a/tests/components/enphase_envoy/test_config_flow.py b/tests/components/enphase_envoy/test_config_flow.py index c20e73d774b..121c2583050 100644 --- a/tests/components/enphase_envoy/test_config_flow.py +++ b/tests/components/enphase_envoy/test_config_flow.py @@ -90,47 +90,23 @@ async def test_user_no_serial_number( } -async def test_form_invalid_auth( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, -) -> None: - """Test we handle invalid auth.""" - mock_envoy.authenticate.side_effect = EnvoyAuthenticationError( - "fail authentication" - ) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - ) - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "invalid_auth"} - - @pytest.mark.parametrize( ("exception", "error"), [ + (EnvoyAuthenticationError("fail authentication"), "invalid_auth"), (EnvoyError, "cannot_connect"), + (Exception, "unknown"), (ValueError, "unknown"), ], ) -async def test_form_cannot_connect( +async def test_form_errors( hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_envoy: AsyncMock, exception: Exception, error: str, ) -> None: - """Test we handle cannot connect error.""" + """Test we handle form errors.""" mock_envoy.setup.side_effect = exception result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -148,41 +124,8 @@ async def test_form_cannot_connect( assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": error} - -def _get_schema_default(schema, key_name): - """Iterate schema to find a key.""" - for schema_key in schema: - if schema_key == key_name: - return schema_key.default() - raise KeyError(f"{key_name} not 
found in schema") - - -async def test_zeroconf_pre_token_firmware( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_envoy: AsyncMock, -) -> None: - """Test we can setup from zeroconf.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=zeroconf.ZeroconfServiceInfo( - ip_address=ip_address("1.1.1.1"), - ip_addresses=[ip_address("1.1.1.1")], - hostname="mock_hostname", - name="mock_name", - port=None, - properties={"serialnum": "1234", "protovers": "3.0.0"}, - type="mock_type", - ), - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - assert ( - _get_schema_default(result["data_schema"].schema, CONF_USERNAME) == "installer" - ) - + mock_envoy.setup.side_effect = None + # mock successful authentication and update of credentials result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -192,20 +135,29 @@ async def test_zeroconf_pre_token_firmware( }, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Envoy 1234" - assert result["result"].unique_id == "1234" - assert result["data"] == { - CONF_HOST: "1.1.1.1", - CONF_NAME: "Envoy 1234", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - } -async def test_zeroconf_token_firmware( +def _get_schema_default(schema, key_name): + """Iterate schema to find a key.""" + for schema_key in schema: + if schema_key == key_name: + return schema_key.default() + raise KeyError(f"{key_name} not found in schema") + + +@pytest.mark.parametrize( + ("version", "schema_username"), + [ + ("7.0.0", ""), + ("3.0.0", "installer"), + ], +) +async def test_zeroconf( hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_envoy: AsyncMock, + version: str, + schema_username: str, ) -> None: """Test we can setup from zeroconf.""" result = await hass.config_entries.flow.async_init( @@ -217,13 +169,16 @@ async def test_zeroconf_token_firmware( hostname="mock_hostname", 
name="mock_name", port=None, - properties={"serialnum": "1234", "protovers": "7.0.0"}, + properties={"serialnum": "1234", "protovers": version}, type="mock_type", ), ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - assert _get_schema_default(result["data_schema"].schema, CONF_USERNAME) == "" + assert ( + _get_schema_default(result["data_schema"].schema, CONF_USERNAME) + == schema_username + ) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], From c3d0a01776cf679eab25b15319bbdb0751bda5f1 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Sun, 22 Dec 2024 10:25:59 +0100 Subject: [PATCH 606/677] Migrate to runtime data in Overkiz (#133760) * Migrate to runtime data * Revert * Improve typing --- homeassistant/components/overkiz/__init__.py | 21 +++++----- .../components/overkiz/alarm_control_panel.py | 8 ++-- .../components/overkiz/binary_sensor.py | 5 +-- homeassistant/components/overkiz/button.py | 9 ++-- .../components/overkiz/climate/__init__.py | 8 ++-- .../components/overkiz/cover/__init__.py | 10 ++--- .../components/overkiz/diagnostics.py | 15 +++---- homeassistant/components/overkiz/light.py | 8 ++-- homeassistant/components/overkiz/lock.py | 8 ++-- homeassistant/components/overkiz/number.py | 9 ++-- homeassistant/components/overkiz/scene.py | 8 ++-- homeassistant/components/overkiz/select.py | 9 ++-- homeassistant/components/overkiz/sensor.py | 7 ++-- homeassistant/components/overkiz/siren.py | 8 ++-- homeassistant/components/overkiz/switch.py | 8 ++-- .../components/overkiz/water_heater.py | 42 ------------------- .../overkiz/water_heater/__init__.py | 8 ++-- 17 files changed, 63 insertions(+), 128 deletions(-) delete mode 100644 homeassistant/components/overkiz/water_heater.py diff --git a/homeassistant/components/overkiz/__init__.py b/homeassistant/components/overkiz/__init__.py index ce877e15261..2b4a0367bf7 100644 --- a/homeassistant/components/overkiz/__init__.py +++ 
b/homeassistant/components/overkiz/__init__.py @@ -47,14 +47,17 @@ from .coordinator import OverkizDataUpdateCoordinator @dataclass class HomeAssistantOverkizData: - """Overkiz data stored in the Home Assistant data object.""" + """Overkiz data stored in the runtime data object.""" coordinator: OverkizDataUpdateCoordinator platforms: defaultdict[Platform, list[Device]] scenarios: list[Scenario] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +type OverkizDataConfigEntry = ConfigEntry[HomeAssistantOverkizData] + + +async def async_setup_entry(hass: HomeAssistant, entry: OverkizDataConfigEntry) -> bool: """Set up Overkiz from a config entry.""" client: OverkizClient | None = None api_type = entry.data.get(CONF_API_TYPE, APIType.CLOUD) @@ -123,7 +126,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: platforms: defaultdict[Platform, list[Device]] = defaultdict(list) - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = HomeAssistantOverkizData( + entry.runtime_data = HomeAssistantOverkizData( coordinator=coordinator, platforms=platforms, scenarios=scenarios ) @@ -162,17 +165,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: OverkizDataConfigEntry +) -> bool: """Unload a config entry.""" - - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) async def _async_migrate_entries( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: OverkizDataConfigEntry ) -> bool: """Migrate old entries to new unique IDs.""" entity_registry = er.async_get(hass) diff --git a/homeassistant/components/overkiz/alarm_control_panel.py 
b/homeassistant/components/overkiz/alarm_control_panel.py index bdbf4d0cc8d..90c135291c3 100644 --- a/homeassistant/components/overkiz/alarm_control_panel.py +++ b/homeassistant/components/overkiz/alarm_control_panel.py @@ -16,14 +16,12 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntityFeature, AlarmControlPanelState, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeAssistantOverkizData -from .const import DOMAIN +from . import OverkizDataConfigEntry from .coordinator import OverkizDataUpdateCoordinator from .entity import OverkizDescriptiveEntity @@ -210,11 +208,11 @@ SUPPORTED_DEVICES = {description.key: description for description in ALARM_DESCR async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz alarm control panel from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data async_add_entities( OverkizAlarmControlPanel( diff --git a/homeassistant/components/overkiz/binary_sensor.py b/homeassistant/components/overkiz/binary_sensor.py index 57df3cd4e09..7d0fee6f70e 100644 --- a/homeassistant/components/overkiz/binary_sensor.py +++ b/homeassistant/components/overkiz/binary_sensor.py @@ -18,8 +18,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import HomeAssistantOverkizData -from .const import DOMAIN, IGNORED_OVERKIZ_DEVICES +from .const import IGNORED_OVERKIZ_DEVICES from .entity import OverkizDescriptiveEntity @@ -147,7 +146,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz binary sensors from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data entities: list[OverkizBinarySensor] = [] for device in data.coordinator.data.values(): diff --git a/homeassistant/components/overkiz/button.py b/homeassistant/components/overkiz/button.py index c34be5cde84..92711ac8ca8 100644 --- a/homeassistant/components/overkiz/button.py +++ b/homeassistant/components/overkiz/button.py @@ -12,13 +12,12 @@ from homeassistant.components.button import ( ButtonEntity, ButtonEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeAssistantOverkizData -from .const import DOMAIN, IGNORED_OVERKIZ_DEVICES +from . 
import OverkizDataConfigEntry +from .const import IGNORED_OVERKIZ_DEVICES from .entity import OverkizDescriptiveEntity @@ -100,11 +99,11 @@ SUPPORTED_COMMANDS = { async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz button from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data entities: list[ButtonEntity] = [] for device in data.coordinator.data.values(): diff --git a/homeassistant/components/overkiz/climate/__init__.py b/homeassistant/components/overkiz/climate/__init__.py index 97840df7a41..77ca23b9ae1 100644 --- a/homeassistant/components/overkiz/climate/__init__.py +++ b/homeassistant/components/overkiz/climate/__init__.py @@ -7,14 +7,12 @@ from enum import StrEnum, unique from pyoverkiz.enums import Protocol from pyoverkiz.enums.ui import UIWidget -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .. import HomeAssistantOverkizData -from ..const import DOMAIN +from .. import OverkizDataConfigEntry from .atlantic_electrical_heater import AtlanticElectricalHeater from .atlantic_electrical_heater_with_adjustable_temperature_setpoint import ( AtlanticElectricalHeaterWithAdjustableTemperatureSetpoint, @@ -79,11 +77,11 @@ WIDGET_AND_PROTOCOL_TO_CLIMATE_ENTITY = { async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz climate from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data # Match devices based on the widget. 
entities_based_on_widget: list[Entity] = [ diff --git a/homeassistant/components/overkiz/cover/__init__.py b/homeassistant/components/overkiz/cover/__init__.py index f9df3256253..38c02eba1bb 100644 --- a/homeassistant/components/overkiz/cover/__init__.py +++ b/homeassistant/components/overkiz/cover/__init__.py @@ -2,23 +2,23 @@ from pyoverkiz.enums import OverkizCommand, UIClass -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .. import HomeAssistantOverkizData -from ..const import DOMAIN +from .. import OverkizDataConfigEntry from .awning import Awning from .generic_cover import OverkizGenericCover from .vertical_cover import LowSpeedCover, VerticalCover async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: OverkizDataConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz covers from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data entities: list[OverkizGenericCover] = [ Awning(device.device_url, data.coordinator) diff --git a/homeassistant/components/overkiz/diagnostics.py b/homeassistant/components/overkiz/diagnostics.py index 427230b9c82..dae0c6c59cf 100644 --- a/homeassistant/components/overkiz/diagnostics.py +++ b/homeassistant/components/overkiz/diagnostics.py @@ -7,20 +7,18 @@ from typing import Any from pyoverkiz.enums import APIType from pyoverkiz.obfuscate import obfuscate_id -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntry -from . import HomeAssistantOverkizData -from .const import CONF_API_TYPE, CONF_HUB, DOMAIN +from . 
import OverkizDataConfigEntry +from .const import CONF_API_TYPE, CONF_HUB async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: OverkizDataConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - entry_data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] - client = entry_data.coordinator.client + client = entry.runtime_data.coordinator.client data = { "setup": await client.get_diagnostic_data(), @@ -39,11 +37,10 @@ async def async_get_config_entry_diagnostics( async def async_get_device_diagnostics( - hass: HomeAssistant, entry: ConfigEntry, device: DeviceEntry + hass: HomeAssistant, entry: OverkizDataConfigEntry, device: DeviceEntry ) -> dict[str, Any]: """Return diagnostics for a device entry.""" - entry_data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] - client = entry_data.coordinator.client + client = entry.runtime_data.coordinator.client device_url = min(device.identifiers)[1] diff --git a/homeassistant/components/overkiz/light.py b/homeassistant/components/overkiz/light.py index 18d724dd63a..933d4cf695b 100644 --- a/homeassistant/components/overkiz/light.py +++ b/homeassistant/components/overkiz/light.py @@ -12,24 +12,22 @@ from homeassistant.components.light import ( ColorMode, LightEntity, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeAssistantOverkizData -from .const import DOMAIN +from . 
import OverkizDataConfigEntry from .coordinator import OverkizDataUpdateCoordinator from .entity import OverkizEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz lights from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data async_add_entities( OverkizLight(device.device_url, data.coordinator) diff --git a/homeassistant/components/overkiz/lock.py b/homeassistant/components/overkiz/lock.py index 2494903d076..1c073d2f9aa 100644 --- a/homeassistant/components/overkiz/lock.py +++ b/homeassistant/components/overkiz/lock.py @@ -7,23 +7,21 @@ from typing import Any from pyoverkiz.enums import OverkizCommand, OverkizCommandParam, OverkizState from homeassistant.components.lock import LockEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeAssistantOverkizData -from .const import DOMAIN +from . 
import OverkizDataConfigEntry from .entity import OverkizEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz locks from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data async_add_entities( OverkizLock(device.device_url, data.coordinator) diff --git a/homeassistant/components/overkiz/number.py b/homeassistant/components/overkiz/number.py index 494d430c393..0e03e822424 100644 --- a/homeassistant/components/overkiz/number.py +++ b/homeassistant/components/overkiz/number.py @@ -14,13 +14,12 @@ from homeassistant.components.number import ( NumberEntity, NumberEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeAssistantOverkizData -from .const import DOMAIN, IGNORED_OVERKIZ_DEVICES +from . 
import OverkizDataConfigEntry +from .const import IGNORED_OVERKIZ_DEVICES from .coordinator import OverkizDataUpdateCoordinator from .entity import OverkizDescriptiveEntity @@ -191,11 +190,11 @@ SUPPORTED_STATES = {description.key: description for description in NUMBER_DESCR async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz number from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data entities: list[OverkizNumber] = [] for device in data.coordinator.data.values(): diff --git a/homeassistant/components/overkiz/scene.py b/homeassistant/components/overkiz/scene.py index 8cbbb9dbe5d..4533ed3245c 100644 --- a/homeassistant/components/overkiz/scene.py +++ b/homeassistant/components/overkiz/scene.py @@ -8,21 +8,19 @@ from pyoverkiz.client import OverkizClient from pyoverkiz.models import Scenario from homeassistant.components.scene import Scene -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeAssistantOverkizData -from .const import DOMAIN +from . 
import OverkizDataConfigEntry async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz scenes from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data async_add_entities( OverkizScene(scene, data.coordinator.client) for scene in data.scenarios diff --git a/homeassistant/components/overkiz/select.py b/homeassistant/components/overkiz/select.py index 83cdc9c4f2b..ac467eaaa7a 100644 --- a/homeassistant/components/overkiz/select.py +++ b/homeassistant/components/overkiz/select.py @@ -8,13 +8,12 @@ from dataclasses import dataclass from pyoverkiz.enums import OverkizCommand, OverkizCommandParam, OverkizState from homeassistant.components.select import SelectEntity, SelectEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeAssistantOverkizData -from .const import DOMAIN, IGNORED_OVERKIZ_DEVICES +from . 
import OverkizDataConfigEntry +from .const import IGNORED_OVERKIZ_DEVICES from .entity import OverkizDescriptiveEntity @@ -129,11 +128,11 @@ SUPPORTED_STATES = {description.key: description for description in SELECT_DESCR async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz select from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data entities: list[OverkizSelect] = [] for device in data.coordinator.data.values(): diff --git a/homeassistant/components/overkiz/sensor.py b/homeassistant/components/overkiz/sensor.py index 8b20d817921..84d25b01d24 100644 --- a/homeassistant/components/overkiz/sensor.py +++ b/homeassistant/components/overkiz/sensor.py @@ -15,7 +15,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, LIGHT_LUX, @@ -34,7 +33,7 @@ from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from . import HomeAssistantOverkizData +from . 
import OverkizDataConfigEntry from .const import ( DOMAIN, IGNORED_OVERKIZ_DEVICES, @@ -483,11 +482,11 @@ SUPPORTED_STATES = {description.key: description for description in SENSOR_DESCR async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz sensors from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data entities: list[SensorEntity] = [] for device in data.coordinator.data.values(): diff --git a/homeassistant/components/overkiz/siren.py b/homeassistant/components/overkiz/siren.py index a7ba41e2fef..f7246e50ec0 100644 --- a/homeassistant/components/overkiz/siren.py +++ b/homeassistant/components/overkiz/siren.py @@ -10,23 +10,21 @@ from homeassistant.components.siren import ( SirenEntity, SirenEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeAssistantOverkizData -from .const import DOMAIN +from . 
import OverkizDataConfigEntry from .entity import OverkizEntity async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz sirens from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data async_add_entities( OverkizSiren(device.device_url, data.coordinator) diff --git a/homeassistant/components/overkiz/switch.py b/homeassistant/components/overkiz/switch.py index ac3ea351559..c921dbab776 100644 --- a/homeassistant/components/overkiz/switch.py +++ b/homeassistant/components/overkiz/switch.py @@ -15,13 +15,11 @@ from homeassistant.components.switch import ( SwitchEntity, SwitchEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import HomeAssistantOverkizData -from .const import DOMAIN +from . 
import OverkizDataConfigEntry from .entity import OverkizDescriptiveEntity @@ -111,11 +109,11 @@ SUPPORTED_DEVICES = { async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz switch from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data async_add_entities( OverkizSwitch( diff --git a/homeassistant/components/overkiz/water_heater.py b/homeassistant/components/overkiz/water_heater.py deleted file mode 100644 index 99bfb279e4c..00000000000 --- a/homeassistant/components/overkiz/water_heater.py +++ /dev/null @@ -1,42 +0,0 @@ -"""Support for Overkiz water heater devices.""" - -from __future__ import annotations - -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from . 
import HomeAssistantOverkizData -from .const import DOMAIN -from .entity import OverkizEntity -from .water_heater_entities import ( - CONTROLLABLE_NAME_TO_WATER_HEATER_ENTITY, - WIDGET_TO_WATER_HEATER_ENTITY, -) - - -async def async_setup_entry( - hass: HomeAssistant, - entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up the Overkiz DHW from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] - entities: list[OverkizEntity] = [] - - for device in data.platforms[Platform.WATER_HEATER]: - if device.controllable_name in CONTROLLABLE_NAME_TO_WATER_HEATER_ENTITY: - entities.append( - CONTROLLABLE_NAME_TO_WATER_HEATER_ENTITY[device.controllable_name]( - device.device_url, data.coordinator - ) - ) - elif device.widget in WIDGET_TO_WATER_HEATER_ENTITY: - entities.append( - WIDGET_TO_WATER_HEATER_ENTITY[device.widget]( - device.device_url, data.coordinator - ) - ) - - async_add_entities(entities) diff --git a/homeassistant/components/overkiz/water_heater/__init__.py b/homeassistant/components/overkiz/water_heater/__init__.py index 1fb5e5696bd..1dd1d596a33 100644 --- a/homeassistant/components/overkiz/water_heater/__init__.py +++ b/homeassistant/components/overkiz/water_heater/__init__.py @@ -4,13 +4,11 @@ from __future__ import annotations from pyoverkiz.enums.ui import UIWidget -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .. import HomeAssistantOverkizData -from ..const import DOMAIN +from .. 
import OverkizDataConfigEntry from ..entity import OverkizEntity from .atlantic_domestic_hot_water_production_mlb_component import ( AtlanticDomesticHotWaterProductionMBLComponent, @@ -22,11 +20,11 @@ from .hitachi_dhw import HitachiDHW async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz DHW from a config entry.""" - data: HomeAssistantOverkizData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data entities: list[OverkizEntity] = [] for device in data.platforms[Platform.WATER_HEATER]: From 3f1acff6521d457c9c40ff7f09c82e3fd5aa1db2 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Sun, 22 Dec 2024 10:31:09 +0100 Subject: [PATCH 607/677] Add support for HitachiAirToWaterHeatingZone in Overkiz (#133768) * Add support for HitachiAirToWaterHeatingZone in Overkiz * Clean up * Fix typing * Fix typing * Fix typing * Adapt to new PyOverkiz --- .../components/overkiz/climate/__init__.py | 2 + .../hitachi_air_to_water_heating_zone.py | 123 ++++++++++++++++++ homeassistant/components/overkiz/const.py | 1 + 3 files changed, 126 insertions(+) create mode 100644 homeassistant/components/overkiz/climate/hitachi_air_to_water_heating_zone.py diff --git a/homeassistant/components/overkiz/climate/__init__.py b/homeassistant/components/overkiz/climate/__init__.py index 77ca23b9ae1..1398bb7c25a 100644 --- a/homeassistant/components/overkiz/climate/__init__.py +++ b/homeassistant/components/overkiz/climate/__init__.py @@ -27,6 +27,7 @@ from .atlantic_pass_apc_zone_control import AtlanticPassAPCZoneControl from .atlantic_pass_apc_zone_control_zone import AtlanticPassAPCZoneControlZone from .hitachi_air_to_air_heat_pump_hlrrwifi import HitachiAirToAirHeatPumpHLRRWIFI from .hitachi_air_to_air_heat_pump_ovp import HitachiAirToAirHeatPumpOVP +from .hitachi_air_to_water_heating_zone import HitachiAirToWaterHeatingZone from 
.somfy_heating_temperature_interface import SomfyHeatingTemperatureInterface from .somfy_thermostat import SomfyThermostat from .valve_heating_temperature_interface import ValveHeatingTemperatureInterface @@ -51,6 +52,7 @@ WIDGET_TO_CLIMATE_ENTITY = { UIWidget.ATLANTIC_HEAT_RECOVERY_VENTILATION: AtlanticHeatRecoveryVentilation, UIWidget.ATLANTIC_PASS_APC_HEATING_ZONE: AtlanticPassAPCHeatingZone, UIWidget.ATLANTIC_PASS_APC_ZONE_CONTROL: AtlanticPassAPCZoneControl, + UIWidget.HITACHI_AIR_TO_WATER_HEATING_ZONE: HitachiAirToWaterHeatingZone, UIWidget.SOMFY_HEATING_TEMPERATURE_INTERFACE: SomfyHeatingTemperatureInterface, UIWidget.SOMFY_THERMOSTAT: SomfyThermostat, UIWidget.VALVE_HEATING_TEMPERATURE_INTERFACE: ValveHeatingTemperatureInterface, diff --git a/homeassistant/components/overkiz/climate/hitachi_air_to_water_heating_zone.py b/homeassistant/components/overkiz/climate/hitachi_air_to_water_heating_zone.py new file mode 100644 index 00000000000..8410e50873d --- /dev/null +++ b/homeassistant/components/overkiz/climate/hitachi_air_to_water_heating_zone.py @@ -0,0 +1,123 @@ +"""Support for HitachiAirToWaterHeatingZone.""" + +from __future__ import annotations + +from typing import Any, cast + +from pyoverkiz.enums import OverkizCommand, OverkizCommandParam, OverkizState + +from homeassistant.components.climate import ( + PRESET_COMFORT, + PRESET_ECO, + PRESET_NONE, + ClimateEntity, + ClimateEntityFeature, + HVACMode, +) +from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature + +from ..const import DOMAIN +from ..entity import OverkizDataUpdateCoordinator, OverkizEntity + +OVERKIZ_TO_HVAC_MODE: dict[str, HVACMode] = { + OverkizCommandParam.MANU: HVACMode.HEAT, + OverkizCommandParam.AUTO: HVACMode.AUTO, +} + +HVAC_MODE_TO_OVERKIZ = {v: k for k, v in OVERKIZ_TO_HVAC_MODE.items()} + +OVERKIZ_TO_PRESET_MODE: dict[str, str] = { + OverkizCommandParam.COMFORT: PRESET_COMFORT, + OverkizCommandParam.ECO: PRESET_ECO, +} + +PRESET_MODE_TO_OVERKIZ = {v: k for k, v in 
OVERKIZ_TO_PRESET_MODE.items()} + + +class HitachiAirToWaterHeatingZone(OverkizEntity, ClimateEntity): + """Representation of HitachiAirToWaterHeatingZone.""" + + _attr_hvac_modes = [*HVAC_MODE_TO_OVERKIZ] + _attr_preset_modes = [*PRESET_MODE_TO_OVERKIZ] + _attr_supported_features = ( + ClimateEntityFeature.PRESET_MODE | ClimateEntityFeature.TARGET_TEMPERATURE + ) + _attr_min_temp = 5.0 + _attr_max_temp = 35.0 + _attr_precision = 0.1 + _attr_target_temperature_step = 0.5 + _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_translation_key = DOMAIN + + def __init__( + self, device_url: str, coordinator: OverkizDataUpdateCoordinator + ) -> None: + """Init method.""" + super().__init__(device_url, coordinator) + + if self._attr_device_info: + self._attr_device_info["manufacturer"] = "Hitachi" + + @property + def hvac_mode(self) -> HVACMode: + """Return hvac operation ie. heat, cool mode.""" + if ( + state := self.device.states[OverkizState.MODBUS_AUTO_MANU_MODE_ZONE_1] + ) and state.value_as_str: + return OVERKIZ_TO_HVAC_MODE[state.value_as_str] + + return HVACMode.OFF + + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set new target hvac mode.""" + await self.executor.async_execute_command( + OverkizCommand.SET_AUTO_MANU_MODE, HVAC_MODE_TO_OVERKIZ[hvac_mode] + ) + + @property + def preset_mode(self) -> str | None: + """Return the current preset mode, e.g., home, away, temp.""" + if ( + state := self.device.states[OverkizState.MODBUS_YUTAKI_TARGET_MODE] + ) and state.value_as_str: + return OVERKIZ_TO_PRESET_MODE[state.value_as_str] + + return PRESET_NONE + + async def async_set_preset_mode(self, preset_mode: str) -> None: + """Set new preset mode.""" + await self.executor.async_execute_command( + OverkizCommand.SET_TARGET_MODE, PRESET_MODE_TO_OVERKIZ[preset_mode] + ) + + @property + def current_temperature(self) -> float | None: + """Return the current temperature.""" + current_temperature = self.device.states[ + 
OverkizState.MODBUS_ROOM_AMBIENT_TEMPERATURE_STATUS_ZONE_1 + ] + + if current_temperature: + return current_temperature.value_as_float + + return None + + @property + def target_temperature(self) -> float | None: + """Return the temperature we try to reach.""" + target_temperature = self.device.states[ + OverkizState.MODBUS_THERMOSTAT_SETTING_CONTROL_ZONE_1 + ] + + if target_temperature: + return target_temperature.value_as_float + + return None + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + temperature = cast(float, kwargs.get(ATTR_TEMPERATURE)) + + await self.executor.async_execute_command( + OverkizCommand.SET_THERMOSTAT_SETTING_CONTROL_ZONE_1, int(temperature) + ) diff --git a/homeassistant/components/overkiz/const.py b/homeassistant/components/overkiz/const.py index a90260e0f0f..e596b566717 100644 --- a/homeassistant/components/overkiz/const.py +++ b/homeassistant/components/overkiz/const.py @@ -102,6 +102,7 @@ OVERKIZ_DEVICE_TO_PLATFORM: dict[UIClass | UIWidget, Platform | None] = { UIWidget.DOMESTIC_HOT_WATER_PRODUCTION: Platform.WATER_HEATER, # widgetName, uiClass is WaterHeatingSystem (not supported) UIWidget.DOMESTIC_HOT_WATER_TANK: Platform.SWITCH, # widgetName, uiClass is WaterHeatingSystem (not supported) UIWidget.HITACHI_AIR_TO_AIR_HEAT_PUMP: Platform.CLIMATE, # widgetName, uiClass is HeatingSystem (not supported) + UIWidget.HITACHI_AIR_TO_WATER_HEATING_ZONE: Platform.CLIMATE, # widgetName, uiClass is HeatingSystem (not supported) UIWidget.HITACHI_DHW: Platform.WATER_HEATER, # widgetName, uiClass is HitachiHeatingSystem (not supported) UIWidget.MY_FOX_ALARM_CONTROLLER: Platform.ALARM_CONTROL_PANEL, # widgetName, uiClass is Alarm (not supported) UIWidget.MY_FOX_SECURITY_CAMERA: Platform.SWITCH, # widgetName, uiClass is Camera (not supported) From 619aed39b70fb6eb4712cb4fad643e461b16f765 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Sun, 22 Dec 2024 10:36:07 +0100 Subject: [PATCH 608/677] 
Use new UnitOfEnergy constants in Overkiz (#133778) --- homeassistant/components/overkiz/const.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/overkiz/const.py b/homeassistant/components/overkiz/const.py index e596b566717..1a89fecf9c0 100644 --- a/homeassistant/components/overkiz/const.py +++ b/homeassistant/components/overkiz/const.py @@ -142,8 +142,8 @@ OVERKIZ_UNIT_TO_HA: dict[str, str] = { MeasuredValueType.ELECTRICAL_POWER_IN_W: UnitOfPower.WATT, MeasuredValueType.ELECTRIC_CURRENT_IN_AMPERE: UnitOfElectricCurrent.AMPERE, MeasuredValueType.ELECTRIC_CURRENT_IN_MILLI_AMPERE: UnitOfElectricCurrent.MILLIAMPERE, - MeasuredValueType.ENERGY_IN_CAL: "cal", - MeasuredValueType.ENERGY_IN_KCAL: "kcal", + MeasuredValueType.ENERGY_IN_CAL: UnitOfEnergy.CALORIE, + MeasuredValueType.ENERGY_IN_KCAL: UnitOfEnergy.KILO_CALORIE, MeasuredValueType.FLOW_IN_LITRE_PER_SECOND: f"{UnitOfVolume.LITERS}/{UnitOfTime.SECONDS}", MeasuredValueType.FLOW_IN_METER_CUBE_PER_HOUR: UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, MeasuredValueType.FLOW_IN_METER_CUBE_PER_SECOND: f"{UnitOfVolume.CUBIC_METERS}/{UnitOfTime.SECONDS}", From 84d359c0d94b4188c139253ec7d6cd93fbb793e6 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Sun, 22 Dec 2024 11:33:32 +0100 Subject: [PATCH 609/677] Fix binary_sensor typing in Overkiz (#133782) --- homeassistant/components/overkiz/binary_sensor.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/overkiz/binary_sensor.py b/homeassistant/components/overkiz/binary_sensor.py index 7d0fee6f70e..3a75cd77c2f 100644 --- a/homeassistant/components/overkiz/binary_sensor.py +++ b/homeassistant/components/overkiz/binary_sensor.py @@ -14,10 +14,10 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from 
homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import OverkizDataConfigEntry from .const import IGNORED_OVERKIZ_DEVICES from .entity import OverkizDescriptiveEntity @@ -142,7 +142,7 @@ SUPPORTED_STATES = { async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: OverkizDataConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Overkiz binary sensors from a config entry.""" From 31c6443a9bb51faeae1164db771fd8a18eb31682 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 22 Dec 2024 11:51:01 +0100 Subject: [PATCH 610/677] Add button platform to Peblar Rocksolid EV Chargers integration (#133780) --- homeassistant/components/peblar/__init__.py | 1 + homeassistant/components/peblar/button.py | 92 ++++++++++++++++++ .../peblar/snapshots/test_button.ambr | 95 +++++++++++++++++++ tests/components/peblar/test_button.py | 36 +++++++ 4 files changed, 224 insertions(+) create mode 100644 homeassistant/components/peblar/button.py create mode 100644 tests/components/peblar/snapshots/test_button.ambr create mode 100644 tests/components/peblar/test_button.py diff --git a/homeassistant/components/peblar/__init__.py b/homeassistant/components/peblar/__init__.py index a055a1a02c8..c185a0e2550 100644 --- a/homeassistant/components/peblar/__init__.py +++ b/homeassistant/components/peblar/__init__.py @@ -30,6 +30,7 @@ from .coordinator import ( PLATFORMS = [ Platform.BINARY_SENSOR, + Platform.BUTTON, Platform.NUMBER, Platform.SELECT, Platform.SENSOR, diff --git a/homeassistant/components/peblar/button.py b/homeassistant/components/peblar/button.py new file mode 100644 index 00000000000..0b0f12be1b3 --- /dev/null +++ b/homeassistant/components/peblar/button.py @@ -0,0 +1,92 @@ +"""Support for Peblar button.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import Any + +from peblar import Peblar + +from 
homeassistant.components.button import ( + ButtonDeviceClass, + ButtonEntity, + ButtonEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import PeblarConfigEntry, PeblarUserConfigurationDataUpdateCoordinator + + +@dataclass(frozen=True, kw_only=True) +class PeblarButtonEntityDescription(ButtonEntityDescription): + """Describe a Peblar button.""" + + press_fn: Callable[[Peblar], Awaitable[Any]] + + +DESCRIPTIONS = [ + PeblarButtonEntityDescription( + key="identify", + device_class=ButtonDeviceClass.IDENTIFY, + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + press_fn=lambda x: x.identify(), + ), + PeblarButtonEntityDescription( + key="reboot", + device_class=ButtonDeviceClass.RESTART, + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + press_fn=lambda x: x.reboot(), + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PeblarConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Peblar buttons based on a config entry.""" + async_add_entities( + PeblarButtonEntity( + entry=entry, + description=description, + ) + for description in DESCRIPTIONS + ) + + +class PeblarButtonEntity( + CoordinatorEntity[PeblarUserConfigurationDataUpdateCoordinator], ButtonEntity +): + """Defines an Peblar button.""" + + entity_description: PeblarButtonEntityDescription + + _attr_has_entity_name = True + + def __init__( + self, + entry: PeblarConfigEntry, + description: PeblarButtonEntityDescription, + ) -> None: + """Initialize the button entity.""" + super().__init__(coordinator=entry.runtime_data.user_configuraton_coordinator) + self.entity_description = description + 
self._attr_unique_id = f"{entry.unique_id}_{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={ + (DOMAIN, entry.runtime_data.system_information.product_serial_number) + }, + ) + + async def async_press(self) -> None: + """Trigger button press on the Peblar device.""" + await self.entity_description.press_fn(self.coordinator.peblar) diff --git a/tests/components/peblar/snapshots/test_button.ambr b/tests/components/peblar/snapshots/test_button.ambr new file mode 100644 index 00000000000..96aab5c93ef --- /dev/null +++ b/tests/components/peblar/snapshots/test_button.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_entities[button][button.peblar_ev_charger_identify-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.peblar_ev_charger_identify', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Identify', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '23-45-A4O-MOF_identify', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[button][button.peblar_ev_charger_identify-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'identify', + 'friendly_name': 'Peblar EV Charger Identify', + }), + 'context': , + 'entity_id': 'button.peblar_ev_charger_identify', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entities[button][button.peblar_ev_charger_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.peblar_ev_charger_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restart', + 'platform': 'peblar', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '23-45-A4O-MOF_reboot', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[button][button.peblar_ev_charger_restart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'Peblar EV Charger Restart', + }), + 'context': , + 'entity_id': 'button.peblar_ev_charger_restart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/peblar/test_button.py b/tests/components/peblar/test_button.py new file mode 100644 index 00000000000..7b271d3747a --- /dev/null +++ b/tests/components/peblar/test_button.py @@ -0,0 +1,36 @@ +"""Tests for the Peblar button platform.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.freeze_time("2024-12-21 21:45:00") +@pytest.mark.parametrize("init_integration", [Platform.BUTTON], indirect=True) +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration") +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the button entities.""" + await 
snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + # Ensure all entities are correctly assigned to the Peblar device + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, "23-45-A4O-MOF")} + ) + assert device_entry + entity_entries = er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + for entity_entry in entity_entries: + assert entity_entry.device_id == device_entry.id From 7be3cad1db91b0cab526cfda1764ca23935b6e14 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 22 Dec 2024 12:00:24 +0100 Subject: [PATCH 611/677] Refactor Twinkly tests (#133725) --- homeassistant/components/twinkly/strings.json | 2 +- tests/components/twinkly/__init__.py | 121 +---- tests/components/twinkly/conftest.py | 89 ++-- tests/components/twinkly/const.py | 5 + .../twinkly/fixtures/get_current_movie.json | 3 + .../twinkly/fixtures/get_details.json | 23 + .../fixtures/get_firmware_version.json | 1 + .../twinkly/fixtures/get_saved_movies.json | 4 + .../twinkly/snapshots/test_diagnostics.ambr | 55 ++- .../twinkly/snapshots/test_light.ambr | 75 ++++ tests/components/twinkly/test_config_flow.py | 306 ++++++------- tests/components/twinkly/test_diagnostics.py | 22 +- tests/components/twinkly/test_init.py | 80 ++-- tests/components/twinkly/test_light.py | 412 ++++++++---------- 14 files changed, 575 insertions(+), 623 deletions(-) create mode 100644 tests/components/twinkly/const.py create mode 100644 tests/components/twinkly/fixtures/get_current_movie.json create mode 100644 tests/components/twinkly/fixtures/get_details.json create mode 100644 tests/components/twinkly/fixtures/get_firmware_version.json create mode 100644 tests/components/twinkly/fixtures/get_saved_movies.json create mode 100644 tests/components/twinkly/snapshots/test_light.ambr diff --git a/homeassistant/components/twinkly/strings.json b/homeassistant/components/twinkly/strings.json index 88bc67abbbd..d27de8a75de 100644 --- 
a/homeassistant/components/twinkly/strings.json +++ b/homeassistant/components/twinkly/strings.json @@ -17,7 +17,7 @@ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "abort": { - "device_exists": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } } } diff --git a/tests/components/twinkly/__init__.py b/tests/components/twinkly/__init__.py index 192a5c0e220..7b0ca20fbe1 100644 --- a/tests/components/twinkly/__init__.py +++ b/tests/components/twinkly/__init__.py @@ -1,120 +1,13 @@ """Constants and mock for the twinkly component tests.""" -from aiohttp.client_exceptions import ClientConnectionError +from homeassistant.core import HomeAssistant -from homeassistant.components.twinkly.const import DEV_NAME - -TEST_HOST = "test.twinkly.com" -TEST_ID = "twinkly_test_device_id" -TEST_UID = "4c8fccf5-e08a-4173-92d5-49bf479252a2" -TEST_MAC = "aa:bb:cc:dd:ee:ff" -TEST_NAME = "twinkly_test_device_name" -TEST_NAME_ORIGINAL = "twinkly_test_original_device_name" # the original (deprecated) name stored in the conf -TEST_MODEL = "twinkly_test_device_model" +from tests.common import MockConfigEntry -class ClientMock: - """A mock of the ttls.client.Twinkly.""" +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) - def __init__(self) -> None: - """Create a mocked client.""" - self.is_offline = False - self.state = True - self.brightness = {"mode": "enabled", "value": 10} - self.color = None - self.movies = [{"id": 1, "name": "Rainbow"}, {"id": 2, "name": "Flare"}] - self.current_movie = {} - self.default_mode = "movie" - self.mode = None - self.version = "2.8.10" - - self.id = TEST_UID - self.device_info = { - "uuid": self.id, - "device_name": TEST_NAME, - "mac": TEST_MAC, - "product_code": TEST_MODEL, - } - - @property - def host(self) -> str: - 
"""Get the mocked host.""" - return TEST_HOST - - async def get_details(self): - """Get the mocked device info.""" - if self.is_offline: - raise ClientConnectionError - return self.device_info - - async def is_on(self) -> bool: - """Get the mocked on/off state.""" - if self.is_offline: - raise ClientConnectionError - return self.state - - async def turn_on(self) -> None: - """Set the mocked on state.""" - if self.is_offline: - raise ClientConnectionError - self.state = True - self.mode = self.default_mode - - async def turn_off(self) -> None: - """Set the mocked off state.""" - if self.is_offline: - raise ClientConnectionError - self.state = False - - async def get_brightness(self) -> int: - """Get the mocked brightness.""" - if self.is_offline: - raise ClientConnectionError - return self.brightness - - async def set_brightness(self, brightness: int) -> None: - """Set the mocked brightness.""" - if self.is_offline: - raise ClientConnectionError - self.brightness = {"mode": "enabled", "value": brightness} - - def change_name(self, new_name: str) -> None: - """Change the name of this virtual device.""" - self.device_info[DEV_NAME] = new_name - - async def set_static_colour(self, colour) -> None: - """Set static color.""" - self.color = colour - self.default_mode = "color" - - async def set_cycle_colours(self, colour) -> None: - """Set static color.""" - self.color = colour - self.default_mode = "movie" - - async def interview(self) -> None: - """Interview.""" - - async def get_saved_movies(self) -> dict: - """Get saved movies.""" - return self.movies - - async def get_current_movie(self) -> dict: - """Get current movie.""" - return self.current_movie - - async def set_current_movie(self, movie_id: int) -> dict: - """Set current movie.""" - self.current_movie = {"id": movie_id} - - async def set_mode(self, mode: str) -> None: - """Set mode.""" - if mode == "off": - await self.turn_off() - else: - await self.turn_on() - self.mode = mode - - async def 
get_firmware_version(self) -> dict: - """Get firmware version.""" - return {"version": self.version} + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/twinkly/conftest.py b/tests/components/twinkly/conftest.py index 19361af2003..6b32c786c99 100644 --- a/tests/components/twinkly/conftest.py +++ b/tests/components/twinkly/conftest.py @@ -1,55 +1,74 @@ """Configure tests for the Twinkly integration.""" -from collections.abc import Awaitable, Callable, Coroutine -from typing import Any -from unittest.mock import patch +from collections.abc import Generator +from unittest.mock import AsyncMock, patch import pytest -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component +from homeassistant.components.twinkly import DOMAIN +from homeassistant.const import CONF_HOST, CONF_ID, CONF_MODEL, CONF_NAME -from . import TEST_MODEL, TEST_NAME, TEST_UID, ClientMock +from .const import TEST_MAC, TEST_MODEL, TEST_NAME -from tests.common import MockConfigEntry - -type ComponentSetup = Callable[[], Awaitable[ClientMock]] - -DOMAIN = "twinkly" -TITLE = "Twinkly" +from tests.common import ( + MockConfigEntry, + load_json_array_fixture, + load_json_object_fixture, +) -@pytest.fixture(name="config_entry") +@pytest.fixture def mock_config_entry() -> MockConfigEntry: """Create Twinkly entry in Home Assistant.""" - client = ClientMock() return MockConfigEntry( domain=DOMAIN, - title=TITLE, - unique_id=TEST_UID, - entry_id=TEST_UID, + title="Twinkly", + unique_id=TEST_MAC, data={ - "host": client.host, - "id": client.id, - "name": TEST_NAME, - "model": TEST_MODEL, - "device_name": TEST_NAME, + CONF_HOST: "192.168.0.123", + CONF_ID: "497dcba3-ecbf-4587-a2dd-5eb0665e6880", + CONF_NAME: TEST_NAME, + CONF_MODEL: TEST_MODEL, }, + entry_id="01JFMME2P6RA38V5AMPCJ2JYYV", + minor_version=2, ) -@pytest.fixture(name="setup_integration") -async def mock_setup_integration( - hass: 
HomeAssistant, config_entry: MockConfigEntry -) -> Callable[[], Coroutine[Any, Any, ClientMock]]: - """Fixture for setting up the component.""" - config_entry.add_to_hass(hass) +@pytest.fixture +def mock_twinkly_client() -> Generator[AsyncMock]: + """Mock the Twinkly client.""" + with ( + patch( + "homeassistant.components.twinkly.Twinkly", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.twinkly.config_flow.Twinkly", + new=mock_client, + ), + ): + client = mock_client.return_value + client.get_details.return_value = load_json_object_fixture( + "get_details.json", DOMAIN + ) + client.get_firmware_version.return_value = load_json_object_fixture( + "get_firmware_version.json", DOMAIN + ) + client.get_saved_movies.return_value = load_json_array_fixture( + "get_saved_movies.json", DOMAIN + ) + client.get_current_movie.return_value = load_json_object_fixture( + "get_current_movie.json", DOMAIN + ) + client.is_on.return_value = True + client.get_brightness.return_value = {"mode": "enabled", "value": 10} + client.host = "192.168.0.123" + yield client - async def func() -> ClientMock: - mock = ClientMock() - with patch("homeassistant.components.twinkly.Twinkly", return_value=mock): - assert await async_setup_component(hass, DOMAIN, {}) - await hass.async_block_till_done() - return mock - return func +@pytest.fixture +def mock_setup_entry() -> Generator[None]: + """Mock setting up a config entry.""" + with patch("homeassistant.components.twinkly.async_setup_entry", return_value=True): + yield diff --git a/tests/components/twinkly/const.py b/tests/components/twinkly/const.py new file mode 100644 index 00000000000..c2530f1a19d --- /dev/null +++ b/tests/components/twinkly/const.py @@ -0,0 +1,5 @@ +"""Constants for the Twinkly tests.""" + +TEST_MAC = "00:2d:13:3b:aa:bb" +TEST_NAME = "Tree 1" +TEST_MODEL = "TW2016" diff --git a/tests/components/twinkly/fixtures/get_current_movie.json b/tests/components/twinkly/fixtures/get_current_movie.json new file 
mode 100644 index 00000000000..2572ae5fe7c --- /dev/null +++ b/tests/components/twinkly/fixtures/get_current_movie.json @@ -0,0 +1,3 @@ +{ + "id": 1 +} diff --git a/tests/components/twinkly/fixtures/get_details.json b/tests/components/twinkly/fixtures/get_details.json new file mode 100644 index 00000000000..1519520b0b9 --- /dev/null +++ b/tests/components/twinkly/fixtures/get_details.json @@ -0,0 +1,23 @@ +{ + "product_name": "Twinkly", + "product_version": "1", + "hardware_version": "1", + "flash_size": 4, + "led_type": 1, + "led_version": "1", + "product_code": "TW2016", + "device_name": "Tree 1", + "uptime": "4087441", + "rssi": -78, + "hw_id": "002d133b", + "mac": "00:2d:13:3b:aa:bb", + "uuid": "00000000-0000-0000-0000-000000000000", + "max_supported_led": 100, + "base_leds_number": 100, + "number_of_led": 100, + "led_profile": "RGB", + "frame_rate": 14, + "movie_capacity": 708, + "copyright": "LEDWORKS 2017", + "code": 1000 +} diff --git a/tests/components/twinkly/fixtures/get_firmware_version.json b/tests/components/twinkly/fixtures/get_firmware_version.json new file mode 100644 index 00000000000..4f3df8b9ed1 --- /dev/null +++ b/tests/components/twinkly/fixtures/get_firmware_version.json @@ -0,0 +1 @@ +{ "version": "2.7.2" } diff --git a/tests/components/twinkly/fixtures/get_saved_movies.json b/tests/components/twinkly/fixtures/get_saved_movies.json new file mode 100644 index 00000000000..0ee21f3254d --- /dev/null +++ b/tests/components/twinkly/fixtures/get_saved_movies.json @@ -0,0 +1,4 @@ +[ + { "id": 1, "name": "Rainbow" }, + { "id": 2, "name": "Flare" } +] diff --git a/tests/components/twinkly/snapshots/test_diagnostics.ambr b/tests/components/twinkly/snapshots/test_diagnostics.ambr index abd923dcb83..e9c89754ab7 100644 --- a/tests/components/twinkly/snapshots/test_diagnostics.ambr +++ b/tests/components/twinkly/snapshots/test_diagnostics.ambr @@ -3,35 +3,64 @@ dict({ 'attributes': dict({ 'brightness': 26, - 'color_mode': 'brightness', + 'color_mode': 
'rgb', 'effect': None, 'effect_list': list([ ]), - 'friendly_name': 'twinkly_test_device_name', + 'friendly_name': 'Tree 1', + 'hs_color': list([ + 0.0, + 0.0, + ]), + 'rgb_color': list([ + 255, + 255, + 255, + ]), 'supported_color_modes': list([ - 'brightness', + 'rgb', ]), 'supported_features': 4, + 'xy_color': list([ + 0.323, + 0.329, + ]), }), 'device_info': dict({ - 'device_name': 'twinkly_test_device_name', + 'base_leds_number': 100, + 'code': 1000, + 'copyright': 'LEDWORKS 2017', + 'device_name': 'Tree 1', + 'flash_size': 4, + 'frame_rate': 14, + 'hardware_version': '1', + 'hw_id': '002d133b', + 'led_profile': 'RGB', + 'led_type': 1, + 'led_version': '1', 'mac': '**REDACTED**', - 'product_code': 'twinkly_test_device_model', - 'uuid': '4c8fccf5-e08a-4173-92d5-49bf479252a2', + 'max_supported_led': 100, + 'movie_capacity': 708, + 'number_of_led': 100, + 'product_code': 'TW2016', + 'product_name': 'Twinkly', + 'product_version': '1', + 'rssi': -78, + 'uptime': '4087441', + 'uuid': '00000000-0000-0000-0000-000000000000', }), 'entry': dict({ 'data': dict({ - 'device_name': 'twinkly_test_device_name', 'host': '**REDACTED**', - 'id': '4c8fccf5-e08a-4173-92d5-49bf479252a2', - 'model': 'twinkly_test_device_model', - 'name': 'twinkly_test_device_name', + 'id': '497dcba3-ecbf-4587-a2dd-5eb0665e6880', + 'model': 'TW2016', + 'name': 'Tree 1', }), 'disabled_by': None, 'discovery_keys': dict({ }), 'domain': 'twinkly', - 'entry_id': '4c8fccf5-e08a-4173-92d5-49bf479252a2', + 'entry_id': '01JFMME2P6RA38V5AMPCJ2JYYV', 'minor_version': 2, 'options': dict({ }), @@ -39,9 +68,9 @@ 'pref_disable_polling': False, 'source': 'user', 'title': 'Twinkly', - 'unique_id': 'aa:bb:cc:dd:ee:ff', + 'unique_id': '00:2d:13:3b:aa:bb', 'version': 1, }), - 'sw_version': '2.8.10', + 'sw_version': '2.7.2', }) # --- diff --git a/tests/components/twinkly/snapshots/test_light.ambr b/tests/components/twinkly/snapshots/test_light.ambr new file mode 100644 index 00000000000..ac4e275a0a1 --- /dev/null +++ 
b/tests/components/twinkly/snapshots/test_light.ambr @@ -0,0 +1,75 @@ +# serializer version: 1 +# name: test_entities[light.tree_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.tree_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'twinkly', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'light', + 'unique_id': '00:2d:13:3b:aa:bb', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[light.tree_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 26, + 'color_mode': , + 'effect': None, + 'effect_list': list([ + ]), + 'friendly_name': 'Tree 1', + 'hs_color': tuple( + 0.0, + 0.0, + ), + 'rgb_color': tuple( + 255, + 255, + 255, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.323, + 0.329, + ), + }), + 'context': , + 'entity_id': 'light.tree_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/twinkly/test_config_flow.py b/tests/components/twinkly/test_config_flow.py index 8d8e955291e..2b61b26fe0c 100644 --- a/tests/components/twinkly/test_config_flow.py +++ b/tests/components/twinkly/test_config_flow.py @@ -1,196 +1,170 @@ """Tests for the config_flow of the twinly component.""" -from unittest.mock import patch +from unittest.mock import AsyncMock + +import pytest -from homeassistant import config_entries from homeassistant.components import dhcp -from homeassistant.components.twinkly.const import 
DOMAIN as TWINKLY_DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.components.twinkly.const import DOMAIN +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER from homeassistant.const import CONF_HOST, CONF_ID, CONF_MODEL, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from . import TEST_MODEL, TEST_NAME, ClientMock +from .const import TEST_MAC, TEST_MODEL, TEST_NAME from tests.common import MockConfigEntry -async def test_invalid_host(hass: HomeAssistant) -> None: - """Test the failure when invalid host provided.""" - client = ClientMock() - client.is_offline = True - with patch( - "homeassistant.components.twinkly.config_flow.Twinkly", return_value=client - ): - result = await hass.config_entries.flow.async_init( - TWINKLY_DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "dummy"}, - ) +@pytest.mark.usefixtures("mock_twinkly_client", "mock_setup_entry") +async def test_full_flow(hass: HomeAssistant) -> None: + """Test the full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.123"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == TEST_NAME + assert result["data"] == { + CONF_HOST: "192.168.0.123", + CONF_ID: "00000000-0000-0000-0000-000000000000", + CONF_NAME: TEST_NAME, + CONF_MODEL: TEST_MODEL, + } + assert result["result"].unique_id == TEST_MAC + + +@pytest.mark.usefixtures("mock_setup_entry") +async def 
test_exceptions(hass: HomeAssistant, mock_twinkly_client: AsyncMock) -> None: + """Test the failure when raising exceptions.""" + mock_twinkly_client.get_details.side_effect = TimeoutError + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.123"}, + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {CONF_HOST: "cannot_connect"} + mock_twinkly_client.get_details.side_effect = None -async def test_success_flow(hass: HomeAssistant) -> None: - """Test that an entity is created when the flow completes.""" - client = ClientMock() - with ( - patch( - "homeassistant.components.twinkly.config_flow.Twinkly", return_value=client - ), - patch("homeassistant.components.twinkly.async_setup_entry", return_value=True), - ): - result = await hass.config_entries.flow.async_init( - TWINKLY_DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "dummy"}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == TEST_NAME - assert result["data"] == { - CONF_HOST: "dummy", - CONF_ID: client.id, - CONF_NAME: TEST_NAME, - CONF_MODEL: TEST_MODEL, - } - - -async def test_dhcp_can_confirm(hass: HomeAssistant) -> None: - """Test DHCP discovery flow can confirm right away.""" - client = ClientMock() - with patch( - "homeassistant.components.twinkly.config_flow.Twinkly", return_value=client - ): - result = await hass.config_entries.flow.async_init( - TWINKLY_DOMAIN, - context={"source": config_entries.SOURCE_DHCP}, - data=dhcp.DhcpServiceInfo( - hostname="Twinkly_XYZ", - ip="1.2.3.4", - 
macaddress="aabbccddeeff", - ), - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "discovery_confirm" - - -async def test_dhcp_success(hass: HomeAssistant) -> None: - """Test DHCP discovery flow success.""" - client = ClientMock() - with ( - patch( - "homeassistant.components.twinkly.config_flow.Twinkly", return_value=client - ), - patch("homeassistant.components.twinkly.async_setup_entry", return_value=True), - ): - result = await hass.config_entries.flow.async_init( - TWINKLY_DOMAIN, - context={"source": config_entries.SOURCE_DHCP}, - data=dhcp.DhcpServiceInfo( - hostname="Twinkly_XYZ", - ip="1.2.3.4", - macaddress="aabbccddeeff", - ), - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "discovery_confirm" - - result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == TEST_NAME - assert result["data"] == { - CONF_HOST: "1.2.3.4", - CONF_ID: client.id, - CONF_NAME: TEST_NAME, - CONF_MODEL: TEST_MODEL, - } - - -async def test_dhcp_already_exists(hass: HomeAssistant) -> None: - """Test DHCP discovery flow that fails to connect.""" - client = ClientMock() - - entry = MockConfigEntry( - domain=TWINKLY_DOMAIN, - data={ - CONF_HOST: "1.2.3.4", - CONF_ID: client.id, - CONF_NAME: TEST_NAME, - CONF_MODEL: TEST_MODEL, - }, - unique_id=client.id, + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.0.123"}, ) - entry.add_to_hass(hass) + assert result["type"] is FlowResultType.CREATE_ENTRY - with patch( - "homeassistant.components.twinkly.config_flow.Twinkly", return_value=client - ): - result = await hass.config_entries.flow.async_init( - TWINKLY_DOMAIN, - context={"source": config_entries.SOURCE_DHCP}, - data=dhcp.DhcpServiceInfo( - hostname="Twinkly_XYZ", - ip="1.2.3.4", - 
macaddress="aabbccddeeff", - ), - ) - await hass.async_block_till_done() +@pytest.mark.usefixtures("mock_twinkly_client", "mock_setup_entry") +async def test_already_configured( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test the device is already configured.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: "192.168.0.123"} + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" +@pytest.mark.usefixtures("mock_twinkly_client", "mock_setup_entry") +async def test_dhcp_full_flow(hass: HomeAssistant) -> None: + """Test DHCP discovery flow can confirm right away.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_DHCP}, + data=dhcp.DhcpServiceInfo( + hostname="Twinkly_XYZ", + ip="1.2.3.4", + macaddress="002d133baabb", + ), + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "discovery_confirm" + + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == TEST_NAME + assert result["data"] == { + CONF_HOST: "1.2.3.4", + CONF_ID: "00000000-0000-0000-0000-000000000000", + CONF_NAME: TEST_NAME, + CONF_MODEL: TEST_MODEL, + } + assert result["result"].unique_id == TEST_MAC + + +@pytest.mark.usefixtures("mock_twinkly_client") +async def test_dhcp_already_configured( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test DHCP discovery flow aborts if entry already setup.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": 
SOURCE_DHCP}, + data=dhcp.DhcpServiceInfo( + hostname="Twinkly_XYZ", + ip="1.2.3.4", + macaddress="002d133baabb", + ), + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert mock_config_entry.data[CONF_HOST] == "1.2.3.4" + + +@pytest.mark.usefixtures("mock_twinkly_client", "mock_setup_entry") async def test_user_flow_works_discovery(hass: HomeAssistant) -> None: """Test user flow can continue after discovery happened.""" - client = ClientMock() - with ( - patch( - "homeassistant.components.twinkly.config_flow.Twinkly", return_value=client + await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_DHCP}, + data=dhcp.DhcpServiceInfo( + hostname="Twinkly_XYZ", + ip="1.2.3.4", + macaddress="002d133baabb", ), - patch("homeassistant.components.twinkly.async_setup_entry", return_value=True), - ): - await hass.config_entries.flow.async_init( - TWINKLY_DOMAIN, - context={"source": config_entries.SOURCE_DHCP}, - data=dhcp.DhcpServiceInfo( - hostname="Twinkly_XYZ", - ip="1.2.3.4", - macaddress="aabbccddeeff", - ), - ) - result = await hass.config_entries.flow.async_init( - TWINKLY_DOMAIN, - context={"source": SOURCE_USER}, - ) - assert len(hass.config_entries.flow.async_progress(TWINKLY_DOMAIN)) == 2 - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert len(hass.config_entries.flow.async_progress(DOMAIN)) == 2 + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_HOST: "10.0.0.131"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "10.0.0.131"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY - # Verify the discovery 
flow was aborted - assert not hass.config_entries.flow.async_progress(TWINKLY_DOMAIN) + # Verify the discovery flow was aborted + assert not hass.config_entries.flow.async_progress(DOMAIN) diff --git a/tests/components/twinkly/test_diagnostics.py b/tests/components/twinkly/test_diagnostics.py index f9cf0bc562c..d7ef4dd9b11 100644 --- a/tests/components/twinkly/test_diagnostics.py +++ b/tests/components/twinkly/test_diagnostics.py @@ -1,32 +1,28 @@ """Tests for the diagnostics of the twinkly component.""" -from collections.abc import Awaitable, Callable - +import pytest from syrupy import SnapshotAssertion from syrupy.filters import props from homeassistant.core import HomeAssistant -from . import ClientMock +from . import setup_integration +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator -type ComponentSetup = Callable[[], Awaitable[ClientMock]] - -DOMAIN = "twinkly" - +@pytest.mark.usefixtures("mock_twinkly_client") async def test_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - setup_integration: ComponentSetup, + mock_config_entry: MockConfigEntry, snapshot: SnapshotAssertion, ) -> None: """Test diagnostics.""" - await setup_integration() - entry = hass.config_entries.async_entries(DOMAIN)[0] + await setup_integration(hass, mock_config_entry) - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( - exclude=props("created_at", "modified_at") - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/twinkly/test_init.py b/tests/components/twinkly/test_init.py index 60ebe65b445..0a76a399b63 100644 --- a/tests/components/twinkly/test_init.py +++ b/tests/components/twinkly/test_init.py @@ -1,7 +1,9 @@ """Tests of the initialization of the twinkly integration.""" -from 
unittest.mock import patch -from uuid import uuid4 +from unittest.mock import AsyncMock + +from aiohttp import ClientConnectionError +import pytest from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.components.twinkly.const import DOMAIN @@ -10,82 +12,55 @@ from homeassistant.const import CONF_HOST, CONF_ID, CONF_MODEL, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from . import TEST_HOST, TEST_MAC, TEST_MODEL, TEST_NAME_ORIGINAL, ClientMock +from . import setup_integration +from .const import TEST_MAC, TEST_MODEL from tests.common import MockConfigEntry -async def test_load_unload_entry(hass: HomeAssistant) -> None: - """Validate that setup entry also configure the client.""" - client = ClientMock() +@pytest.mark.usefixtures("mock_twinkly_client") +async def test_load_unload_entry( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test the load/unload of the config entry.""" - device_id = str(uuid4()) - config_entry = MockConfigEntry( - domain=DOMAIN, - data={ - CONF_HOST: TEST_HOST, - CONF_ID: device_id, - CONF_NAME: TEST_NAME_ORIGINAL, - CONF_MODEL: TEST_MODEL, - }, - entry_id=device_id, - unique_id=TEST_MAC, - minor_version=2, - ) + await setup_integration(hass, mock_config_entry) - config_entry.add_to_hass(hass) + assert mock_config_entry.state is ConfigEntryState.LOADED - with patch("homeassistant.components.twinkly.Twinkly", return_value=client): - await hass.config_entries.async_setup(config_entry.entry_id) + await hass.config_entries.async_unload(mock_config_entry.entry_id) - assert config_entry.state is ConfigEntryState.LOADED - - await hass.config_entries.async_unload(config_entry.entry_id) - - assert config_entry.state is ConfigEntryState.NOT_LOADED + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED -async def test_config_entry_not_ready(hass: HomeAssistant) -> None: +async def 
test_config_entry_not_ready( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, +) -> None: """Validate that config entry is retried.""" - client = ClientMock() - client.is_offline = True + mock_twinkly_client.get_details.side_effect = ClientConnectionError - config_entry = MockConfigEntry( - domain=DOMAIN, - data={ - CONF_HOST: TEST_HOST, - CONF_ID: id, - CONF_NAME: TEST_NAME_ORIGINAL, - CONF_MODEL: TEST_MODEL, - }, - minor_version=2, - unique_id=TEST_MAC, - ) + await setup_integration(hass, mock_config_entry) - config_entry.add_to_hass(hass) - - with patch("homeassistant.components.twinkly.Twinkly", return_value=client): - await hass.config_entries.async_setup(config_entry.entry_id) - - assert config_entry.state is ConfigEntryState.SETUP_RETRY + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY +@pytest.mark.usefixtures("mock_twinkly_client") async def test_mac_migration( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, ) -> None: """Validate that the unique_id is migrated to the MAC address.""" - client = ClientMock() - config_entry = MockConfigEntry( domain=DOMAIN, minor_version=1, unique_id="unique_id", data={ - CONF_HOST: TEST_HOST, + CONF_HOST: "192.168.0.123", CONF_ID: id, - CONF_NAME: TEST_NAME_ORIGINAL, + CONF_NAME: "Tree 1", CONF_MODEL: TEST_MODEL, }, ) @@ -100,8 +75,7 @@ async def test_mac_migration( identifiers={(DOMAIN, config_entry.unique_id)}, ) - with patch("homeassistant.components.twinkly.Twinkly", return_value=client): - await hass.config_entries.async_setup(config_entry.entry_id) + await hass.config_entries.async_setup(config_entry.entry_id) assert config_entry.state is ConfigEntryState.LOADED diff --git a/tests/components/twinkly/test_light.py b/tests/components/twinkly/test_light.py index 26df83aebe0..c008ab51ef7 100644 --- a/tests/components/twinkly/test_light.py +++ b/tests/components/twinkly/test_light.py @@ -3,290 +3,287 @@ from __future__ 
import annotations from datetime import timedelta -from unittest.mock import patch +from typing import Any +from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion -from homeassistant.components.light import ATTR_BRIGHTNESS, LightEntityFeature -from homeassistant.components.twinkly.const import DOMAIN as TWINKLY_DOMAIN -from homeassistant.const import CONF_HOST, CONF_ID, CONF_MODEL, CONF_NAME +from homeassistant.components.light import ( + ATTR_BRIGHTNESS, + ATTR_EFFECT, + ATTR_RGB_COLOR, + ATTR_RGBW_COLOR, + DOMAIN as LIGHT_DOMAIN, + LightEntityFeature, +) +from homeassistant.components.twinkly import DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + ATTR_SUPPORTED_FEATURES, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.helpers.device_registry import DeviceEntry -from homeassistant.helpers.entity_registry import RegistryEntry -from . import TEST_MAC, TEST_MODEL, TEST_NAME, TEST_NAME_ORIGINAL, ClientMock +from . 
import setup_integration +from .const import TEST_MAC -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform -async def test_initial_state(hass: HomeAssistant) -> None: - """Validate that entity and device states are updated on startup.""" - entity, device, _, _ = await _create_entries(hass) +@pytest.mark.usefixtures("mock_twinkly_client") +async def test_entities( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the created entities.""" + with patch("homeassistant.components.twinkly.PLATFORMS", [Platform.LIGHT]): + await setup_integration(hass, mock_config_entry) - state = hass.states.get(entity.entity_id) - - # Basic state properties - assert state.name == TEST_NAME - assert state.state == "on" - assert state.attributes[ATTR_BRIGHTNESS] == 26 - assert state.attributes["friendly_name"] == TEST_NAME - - assert device.name == TEST_NAME - assert device.model == TEST_MODEL - assert device.manufacturer == "LEDWORKS" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) -async def test_turn_on_off(hass: HomeAssistant) -> None: +async def test_turn_on_off( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, +) -> None: """Test support of the light.turn_on service.""" - client = ClientMock() - client.state = False - client.brightness = {"mode": "enabled", "value": 20} - entity, _, _, _ = await _create_entries(hass, client) + mock_twinkly_client.is_on.return_value = False - assert hass.states.get(entity.entity_id).state == "off" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("light.tree_1").state == STATE_OFF await hass.services.async_call( - "light", "turn_on", service_data={"entity_id": entity.entity_id}, blocking=True + LIGHT_DOMAIN, + SERVICE_TURN_ON, + 
service_data={ATTR_ENTITY_ID: "light.tree_1"}, + blocking=True, ) - state = hass.states.get(entity.entity_id) - - assert state.state == "on" - assert state.attributes[ATTR_BRIGHTNESS] == 51 + mock_twinkly_client.turn_on.assert_called_once_with() -async def test_turn_on_with_brightness(hass: HomeAssistant) -> None: +async def test_turn_on_with_brightness( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, +) -> None: """Test support of the light.turn_on service with a brightness parameter.""" - client = ClientMock() - client.state = False - client.brightness = {"mode": "enabled", "value": 20} - entity, _, _, _ = await _create_entries(hass, client) + mock_twinkly_client.is_on.return_value = False - assert hass.states.get(entity.entity_id).state == "off" + await setup_integration(hass, mock_config_entry) await hass.services.async_call( - "light", - "turn_on", - service_data={"entity_id": entity.entity_id, "brightness": 255}, + LIGHT_DOMAIN, + SERVICE_TURN_ON, + service_data={ATTR_ENTITY_ID: "light.tree_1", ATTR_BRIGHTNESS: 255}, blocking=True, ) - state = hass.states.get(entity.entity_id) + mock_twinkly_client.set_brightness.assert_called_once_with(100) + mock_twinkly_client.turn_on.assert_called_once_with() - assert state.state == "on" - assert state.attributes[ATTR_BRIGHTNESS] == 255 + +async def test_brightness_to_zero( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, +) -> None: + """Test support of the light.turn_on service with a brightness parameter.""" + await setup_integration(hass, mock_config_entry) await hass.services.async_call( - "light", - "turn_on", - service_data={"entity_id": entity.entity_id, "brightness": 1}, + LIGHT_DOMAIN, + SERVICE_TURN_ON, + service_data={ATTR_ENTITY_ID: "light.tree_1", ATTR_BRIGHTNESS: 1}, blocking=True, ) - state = hass.states.get(entity.entity_id) - - assert state.state == "off" + mock_twinkly_client.set_brightness.assert_not_called() + 
mock_twinkly_client.turn_off.assert_called_once_with() -async def test_turn_on_with_color_rgbw(hass: HomeAssistant) -> None: +async def test_turn_on_with_color_rgbw( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, +) -> None: """Test support of the light.turn_on service with a rgbw parameter.""" - client = ClientMock() - client.state = False - client.device_info["led_profile"] = "RGBW" - client.brightness = {"mode": "enabled", "value": 255} - entity, _, _, _ = await _create_entries(hass, client) + mock_twinkly_client.is_on.return_value = False + mock_twinkly_client.get_details.return_value["led_profile"] = "RGBW" - assert hass.states.get(entity.entity_id).state == "off" + await setup_integration(hass, mock_config_entry) assert ( LightEntityFeature.EFFECT - & hass.states.get(entity.entity_id).attributes["supported_features"] + & hass.states.get("light.tree_1").attributes[ATTR_SUPPORTED_FEATURES] ) await hass.services.async_call( - "light", - "turn_on", - service_data={"entity_id": entity.entity_id, "rgbw_color": (128, 64, 32, 0)}, + LIGHT_DOMAIN, + SERVICE_TURN_ON, + service_data={ + ATTR_ENTITY_ID: "light.tree_1", + ATTR_RGBW_COLOR: (128, 64, 32, 0), + }, blocking=True, ) - state = hass.states.get(entity.entity_id) - - assert state.state == "on" - assert client.color == (128, 64, 32) - assert client.default_mode == "color" - assert client.mode == "color" + mock_twinkly_client.interview.assert_called_once_with() + mock_twinkly_client.set_static_colour.assert_called_once_with((128, 64, 32)) + mock_twinkly_client.set_mode.assert_called_once_with("color") + assert mock_twinkly_client.default_mode == "color" -async def test_turn_on_with_color_rgb(hass: HomeAssistant) -> None: +async def test_turn_on_with_color_rgb( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, +) -> None: """Test support of the light.turn_on service with a rgb parameter.""" - client = ClientMock() - 
client.state = False - client.device_info["led_profile"] = "RGB" - client.brightness = {"mode": "enabled", "value": 255} - entity, _, _, _ = await _create_entries(hass, client) + mock_twinkly_client.is_on.return_value = False + mock_twinkly_client.get_details.return_value["led_profile"] = "RGB" - assert hass.states.get(entity.entity_id).state == "off" + await setup_integration(hass, mock_config_entry) assert ( LightEntityFeature.EFFECT - & hass.states.get(entity.entity_id).attributes["supported_features"] + & hass.states.get("light.tree_1").attributes[ATTR_SUPPORTED_FEATURES] ) await hass.services.async_call( - "light", - "turn_on", - service_data={"entity_id": entity.entity_id, "rgb_color": (128, 64, 32)}, + LIGHT_DOMAIN, + SERVICE_TURN_ON, + service_data={ATTR_ENTITY_ID: "light.tree_1", ATTR_RGB_COLOR: (128, 64, 32)}, blocking=True, ) - state = hass.states.get(entity.entity_id) - - assert state.state == "on" - assert client.color == (128, 64, 32) - assert client.default_mode == "color" - assert client.mode == "color" + mock_twinkly_client.interview.assert_called_once_with() + mock_twinkly_client.set_static_colour.assert_called_once_with((128, 64, 32)) + mock_twinkly_client.set_mode.assert_called_once_with("color") + assert mock_twinkly_client.default_mode == "color" -async def test_turn_on_with_effect(hass: HomeAssistant) -> None: +async def test_turn_on_with_effect( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, +) -> None: """Test support of the light.turn_on service with effects.""" - client = ClientMock() - client.state = False - client.device_info["led_profile"] = "RGB" - client.brightness = {"mode": "enabled", "value": 255} - entity, _, _, _ = await _create_entries(hass, client) + mock_twinkly_client.is_on.return_value = False + mock_twinkly_client.get_details.return_value["led_profile"] = "RGB" - assert hass.states.get(entity.entity_id).state == "off" - assert not client.current_movie + await 
setup_integration(hass, mock_config_entry) assert ( LightEntityFeature.EFFECT - & hass.states.get(entity.entity_id).attributes["supported_features"] + & hass.states.get("light.tree_1").attributes[ATTR_SUPPORTED_FEATURES] ) await hass.services.async_call( - "light", - "turn_on", - service_data={"entity_id": entity.entity_id, "effect": "1 Rainbow"}, + LIGHT_DOMAIN, + SERVICE_TURN_ON, + service_data={ATTR_ENTITY_ID: "light.tree_1", ATTR_EFFECT: "2 Rainbow"}, blocking=True, ) - state = hass.states.get(entity.entity_id) - - assert state.state == "on" - assert client.current_movie["id"] == 1 - assert client.default_mode == "movie" - assert client.mode == "movie" + mock_twinkly_client.interview.assert_called_once_with() + mock_twinkly_client.set_current_movie.assert_called_once_with(2) + mock_twinkly_client.set_mode.assert_called_once_with("movie") + assert mock_twinkly_client.default_mode == "movie" -async def test_turn_on_with_color_rgbw_and_missing_effect(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("data"), + [ + {ATTR_RGBW_COLOR: (128, 64, 32, 0)}, + {ATTR_RGB_COLOR: (128, 64, 32)}, + ], +) +async def test_turn_on_with_missing_effect( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, + data: dict[str, Any], +) -> None: """Test support of the light.turn_on service with rgbw color and missing effect support.""" - client = ClientMock() - client.state = False - client.device_info["led_profile"] = "RGBW" - client.brightness = {"mode": "enabled", "value": 255} - client.version = "2.7.0" - entity, _, _, _ = await _create_entries(hass, client) + mock_twinkly_client.is_on.return_value = False + mock_twinkly_client.get_firmware_version.return_value["version"] = "2.7.0" - assert hass.states.get(entity.entity_id).state == "off" + await setup_integration(hass, mock_config_entry) assert ( - not LightEntityFeature.EFFECT - & hass.states.get(entity.entity_id).attributes["supported_features"] + LightEntityFeature.EFFECT + ^ 
hass.states.get("light.tree_1").attributes[ATTR_SUPPORTED_FEATURES] ) await hass.services.async_call( - "light", - "turn_on", - service_data={"entity_id": entity.entity_id, "rgbw_color": (128, 64, 32, 0)}, + LIGHT_DOMAIN, + SERVICE_TURN_ON, + service_data={ATTR_ENTITY_ID: "light.tree_1"} | data, blocking=True, ) - state = hass.states.get(entity.entity_id) - - assert state.state == "on" - assert client.color == (0, 128, 64, 32) - assert client.mode == "movie" - assert client.default_mode == "movie" + mock_twinkly_client.interview.assert_called_once_with() + mock_twinkly_client.set_cycle_colours.assert_called_once_with((128, 64, 32)) + mock_twinkly_client.set_mode.assert_called_once_with("movie") + assert mock_twinkly_client.default_mode == "movie" + mock_twinkly_client.set_current_movie.assert_not_called() -async def test_turn_on_with_color_rgb_and_missing_effect(hass: HomeAssistant) -> None: - """Test support of the light.turn_on service with rgb color and missing effect support.""" - client = ClientMock() - client.state = False - client.device_info["led_profile"] = "RGB" - client.brightness = {"mode": "enabled", "value": 255} - client.version = "2.7.0" - entity, _, _, _ = await _create_entries(hass, client) +async def test_turn_on_with_color_rgbw_and_missing_effect( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, +) -> None: + """Test support of the light.turn_on service with missing effect support.""" + mock_twinkly_client.is_on.return_value = False + mock_twinkly_client.get_firmware_version.return_value["version"] = "2.7.0" - assert hass.states.get(entity.entity_id).state == "off" + await setup_integration(hass, mock_config_entry) assert ( - not LightEntityFeature.EFFECT - & hass.states.get(entity.entity_id).attributes["supported_features"] + LightEntityFeature.EFFECT + ^ hass.states.get("light.tree_1").attributes[ATTR_SUPPORTED_FEATURES] ) await hass.services.async_call( - "light", - "turn_on", - 
service_data={"entity_id": entity.entity_id, "rgb_color": (128, 64, 32)}, + LIGHT_DOMAIN, + SERVICE_TURN_ON, + service_data={ATTR_ENTITY_ID: "light.tree_1", ATTR_EFFECT: "2 Rainbow"}, blocking=True, ) - state = hass.states.get(entity.entity_id) - - assert state.state == "on" - assert client.color == (128, 64, 32) - assert client.mode == "movie" - assert client.default_mode == "movie" + mock_twinkly_client.set_current_movie.assert_not_called() -async def test_turn_on_with_effect_missing_effects(hass: HomeAssistant) -> None: - """Test support of the light.turn_on service with effect set even if effects are not supported.""" - client = ClientMock() - client.state = False - client.device_info["led_profile"] = "RGB" - client.brightness = {"mode": "enabled", "value": 255} - client.version = "2.7.0" - entity, _, _, _ = await _create_entries(hass, client) - - assert hass.states.get(entity.entity_id).state == "off" - assert not client.current_movie - assert ( - not LightEntityFeature.EFFECT - & hass.states.get(entity.entity_id).attributes["supported_features"] - ) - - await hass.services.async_call( - "light", - "turn_on", - service_data={"entity_id": entity.entity_id, "effect": "1 Rainbow"}, - blocking=True, - ) - - state = hass.states.get(entity.entity_id) - - assert state.state == "on" - assert not client.current_movie - assert client.default_mode == "movie" - assert client.mode == "movie" - - -async def test_turn_off(hass: HomeAssistant) -> None: +async def test_turn_off( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, +) -> None: """Test support of the light.turn_off service.""" - entity, _, _, _ = await _create_entries(hass) - - assert hass.states.get(entity.entity_id).state == "on" + await setup_integration(hass, mock_config_entry) await hass.services.async_call( - "light", "turn_off", service_data={"entity_id": entity.entity_id}, blocking=True + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + service_data={ATTR_ENTITY_ID: 
"light.tree_1"}, + blocking=True, ) - - state = hass.states.get(entity.entity_id) - - assert state.state == "off" + mock_twinkly_client.turn_off.assert_called_once_with() async def test_update_name( hass: HomeAssistant, device_registry: dr.DeviceRegistry, freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, ) -> None: """Validate device's name update behavior. @@ -294,56 +291,15 @@ async def test_update_name( then the name of the entity is updated and it's also persisted, so it can be restored when starting HA while Twinkly is offline. """ - entity, _, client, config_entry = await _create_entries(hass) - client.change_name("new_device_name") + await setup_integration(hass, mock_config_entry) + + mock_twinkly_client.get_details.return_value["device_name"] = "new_device_name" + freezer.tick(timedelta(seconds=30)) async_fire_time_changed(hass) await hass.async_block_till_done() - dev_entry = device_registry.async_get_device({(TWINKLY_DOMAIN, TEST_MAC)}) + dev_entry = device_registry.async_get_device({(DOMAIN, TEST_MAC)}) assert dev_entry.name == "new_device_name" - assert config_entry.data[CONF_NAME] == "new_device_name" - - -async def test_unload(hass: HomeAssistant) -> None: - """Validate that entities can be unloaded from the UI.""" - - _, _, _, entry = await _create_entries(hass) - - assert await hass.config_entries.async_unload(entry.entry_id) - - -async def _create_entries( - hass: HomeAssistant, client=None -) -> tuple[RegistryEntry, DeviceEntry, ClientMock]: - client = ClientMock() if client is None else client - - with patch("homeassistant.components.twinkly.Twinkly", return_value=client): - config_entry = MockConfigEntry( - domain=TWINKLY_DOMAIN, - data={ - CONF_HOST: client, - CONF_ID: client.id, - CONF_NAME: TEST_NAME_ORIGINAL, - CONF_MODEL: TEST_MODEL, - }, - unique_id=TEST_MAC, - minor_version=2, - ) - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - 
await hass.async_block_till_done() - - device_registry = dr.async_get(hass) - entity_registry = er.async_get(hass) - - entity_id = entity_registry.async_get_entity_id("light", TWINKLY_DOMAIN, TEST_MAC) - entity_entry = entity_registry.async_get(entity_id) - device = device_registry.async_get_device(identifiers={(TWINKLY_DOMAIN, TEST_MAC)}) - - assert entity_entry is not None - assert device is not None - - return entity_entry, device, client, config_entry From 88eb550ec165177d358685b1cfea125052ba1e03 Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Sun, 22 Dec 2024 12:01:13 +0100 Subject: [PATCH 612/677] Update quality-scale status for enphase_envoy config_flow missing data descriptions (#133726) --- homeassistant/components/enphase_envoy/quality_scale.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/homeassistant/components/enphase_envoy/quality_scale.yaml b/homeassistant/components/enphase_envoy/quality_scale.yaml index 171c07e9474..210491c031c 100644 --- a/homeassistant/components/enphase_envoy/quality_scale.yaml +++ b/homeassistant/components/enphase_envoy/quality_scale.yaml @@ -17,7 +17,6 @@ rules: status: todo comment: | - async_step_reaut L160: I believe that the unique is already set when starting a reauth flow - - The config flow is missing data descriptions for the other fields dependency-transparency: done docs-actions: status: done From cdd73a5c5a0a0573182ce5e72294ee3551d5d4a0 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 22 Dec 2024 12:16:08 +0100 Subject: [PATCH 613/677] Set parallel updates for Peblar Rocksolid EV Chargers integration (#133786) --- homeassistant/components/peblar/binary_sensor.py | 2 ++ homeassistant/components/peblar/button.py | 2 ++ homeassistant/components/peblar/number.py | 2 ++ homeassistant/components/peblar/quality_scale.yaml | 2 +- homeassistant/components/peblar/select.py | 2 ++ homeassistant/components/peblar/sensor.py | 2 ++ 
homeassistant/components/peblar/switch.py | 2 ++ homeassistant/components/peblar/update.py | 2 ++ 8 files changed, 15 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/peblar/binary_sensor.py b/homeassistant/components/peblar/binary_sensor.py index f28a02422a9..5b65a8e976d 100644 --- a/homeassistant/components/peblar/binary_sensor.py +++ b/homeassistant/components/peblar/binary_sensor.py @@ -19,6 +19,8 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN from .coordinator import PeblarConfigEntry, PeblarData, PeblarDataUpdateCoordinator +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class PeblarBinarySensorEntityDescription(BinarySensorEntityDescription): diff --git a/homeassistant/components/peblar/button.py b/homeassistant/components/peblar/button.py index 0b0f12be1b3..68d871c8298 100644 --- a/homeassistant/components/peblar/button.py +++ b/homeassistant/components/peblar/button.py @@ -22,6 +22,8 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN from .coordinator import PeblarConfigEntry, PeblarUserConfigurationDataUpdateCoordinator +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class PeblarButtonEntityDescription(ButtonEntityDescription): diff --git a/homeassistant/components/peblar/number.py b/homeassistant/components/peblar/number.py index a5e926714d9..32647199b8b 100644 --- a/homeassistant/components/peblar/number.py +++ b/homeassistant/components/peblar/number.py @@ -27,6 +27,8 @@ from .coordinator import ( PeblarRuntimeData, ) +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class PeblarNumberEntityDescription(NumberEntityDescription): diff --git a/homeassistant/components/peblar/quality_scale.yaml b/homeassistant/components/peblar/quality_scale.yaml index 78ec3718caf..aea83da25ab 100644 --- a/homeassistant/components/peblar/quality_scale.yaml +++ b/homeassistant/components/peblar/quality_scale.yaml @@ 
-38,7 +38,7 @@ rules: entity-unavailable: done integration-owner: done log-when-unavailable: done - parallel-updates: todo + parallel-updates: done reauthentication-flow: done test-coverage: todo # Gold diff --git a/homeassistant/components/peblar/select.py b/homeassistant/components/peblar/select.py index 95a87248804..d2c74f482c6 100644 --- a/homeassistant/components/peblar/select.py +++ b/homeassistant/components/peblar/select.py @@ -18,6 +18,8 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN from .coordinator import PeblarConfigEntry, PeblarUserConfigurationDataUpdateCoordinator +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class PeblarSelectEntityDescription(SelectEntityDescription): diff --git a/homeassistant/components/peblar/sensor.py b/homeassistant/components/peblar/sensor.py index 233417051cb..df8cac13bcc 100644 --- a/homeassistant/components/peblar/sensor.py +++ b/homeassistant/components/peblar/sensor.py @@ -34,6 +34,8 @@ from .const import ( ) from .coordinator import PeblarConfigEntry, PeblarData, PeblarDataUpdateCoordinator +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class PeblarSensorDescription(SensorEntityDescription): diff --git a/homeassistant/components/peblar/switch.py b/homeassistant/components/peblar/switch.py index 9a6788a62be..3b7ab591508 100644 --- a/homeassistant/components/peblar/switch.py +++ b/homeassistant/components/peblar/switch.py @@ -23,6 +23,8 @@ from .coordinator import ( PeblarRuntimeData, ) +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class PeblarSwitchEntityDescription(SwitchEntityDescription): diff --git a/homeassistant/components/peblar/update.py b/homeassistant/components/peblar/update.py index cc0f1ee0c79..37f20722b98 100644 --- a/homeassistant/components/peblar/update.py +++ b/homeassistant/components/peblar/update.py @@ -22,6 +22,8 @@ from .coordinator import ( PeblarVersionInformation, ) +PARALLEL_UPDATES = 1 + 
@dataclass(frozen=True, kw_only=True) class PeblarUpdateEntityDescription(UpdateEntityDescription): From 56b58cec3e26f3485026f47bb45ec744e62636d0 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Sun, 22 Dec 2024 12:17:09 +0100 Subject: [PATCH 614/677] Fix errors in HitachiDHW in Overkiz (#133765) * Small changes to fix errors in DHW * Update * Bugfix in float/int mistake * Fix typing * Fix code style * Fix mypy --- .../components/overkiz/water_heater/hitachi_dhw.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/overkiz/water_heater/hitachi_dhw.py b/homeassistant/components/overkiz/water_heater/hitachi_dhw.py index dc2a93a8d2f..988c66afdb0 100644 --- a/homeassistant/components/overkiz/water_heater/hitachi_dhw.py +++ b/homeassistant/components/overkiz/water_heater/hitachi_dhw.py @@ -48,8 +48,10 @@ class HitachiDHW(OverkizEntity, WaterHeaterEntity): def current_temperature(self) -> float | None: """Return the current temperature.""" current_temperature = self.device.states[OverkizState.CORE_DHW_TEMPERATURE] - if current_temperature: - return current_temperature.value_as_float + + if current_temperature and current_temperature.value_as_int: + return float(current_temperature.value_as_int) + return None @property @@ -58,13 +60,14 @@ class HitachiDHW(OverkizEntity, WaterHeaterEntity): target_temperature = self.device.states[ OverkizState.MODBUS_CONTROL_DHW_SETTING_TEMPERATURE ] - if target_temperature: - return target_temperature.value_as_float + + if target_temperature and target_temperature.value_as_int: + return float(target_temperature.value_as_int) + return None async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" - await self.executor.async_execute_command( OverkizCommand.SET_CONTROL_DHW_SETTING_TEMPERATURE, int(kwargs[ATTR_TEMPERATURE]), From 5ef3901b440503941f73b1e71df718e3c9b2d60c Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 22 Dec 2024 
13:32:15 +0100 Subject: [PATCH 615/677] Add base entity for Niko Home Control (#133744) --- .../components/niko_home_control/entity.py | 50 +++++++++++++++++++ .../components/niko_home_control/light.py | 25 +++------- 2 files changed, 58 insertions(+), 17 deletions(-) create mode 100644 homeassistant/components/niko_home_control/entity.py diff --git a/homeassistant/components/niko_home_control/entity.py b/homeassistant/components/niko_home_control/entity.py new file mode 100644 index 00000000000..fe14e09d957 --- /dev/null +++ b/homeassistant/components/niko_home_control/entity.py @@ -0,0 +1,50 @@ +"""Base class for Niko Home Control entities.""" + +from abc import abstractmethod + +from nhc.action import NHCAction +from nhc.controller import NHCController + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN + + +class NikoHomeControlEntity(Entity): + """Base class for Niko Home Control entities.""" + + _attr_has_entity_name = True + _attr_should_poll = False + + def __init__( + self, action: NHCAction, controller: NHCController, unique_id: str + ) -> None: + """Set up the Niko Home Control entity.""" + self._controller = controller + self._action = action + self._attr_unique_id = unique_id = f"{unique_id}-{action.id}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, unique_id)}, + manufacturer="Niko", + name=action.name, + suggested_area=action.suggested_area, + ) + self.update_state() + + async def async_added_to_hass(self) -> None: + """Subscribe to updates.""" + self.async_on_remove( + self._controller.register_callback( + self._action.id, self.async_update_callback + ) + ) + + async def async_update_callback(self, state: int) -> None: + """Handle updates from the controller.""" + self.update_state() + self.async_write_ha_state() + + @abstractmethod + def update_state(self) -> None: + """Update the state of the entity.""" diff --git 
a/homeassistant/components/niko_home_control/light.py b/homeassistant/components/niko_home_control/light.py index 29b952fcb77..c9902cbf11b 100644 --- a/homeassistant/components/niko_home_control/light.py +++ b/homeassistant/components/niko_home_control/light.py @@ -25,6 +25,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . import NHCController, NikoHomeControlConfigEntry from .const import DOMAIN +from .entity import NikoHomeControlEntity # delete after 2025.7.0 PLATFORM_SCHEMA = LIGHT_PLATFORM_SCHEMA.extend({vol.Required(CONF_HOST): cv.string}) @@ -91,33 +92,23 @@ async def async_setup_entry( ) -class NikoHomeControlLight(LightEntity): +class NikoHomeControlLight(NikoHomeControlEntity, LightEntity): """Representation of a Niko Light.""" + _attr_name = None + _action: NHCLight + def __init__( self, action: NHCLight, controller: NHCController, unique_id: str ) -> None: """Set up the Niko Home Control light platform.""" - self._controller = controller - self._action = action - self._attr_unique_id = f"{unique_id}-{action.id}" - self._attr_name = action.name - self._attr_is_on = action.is_on + super().__init__(action, controller, unique_id) self._attr_color_mode = ColorMode.ONOFF self._attr_supported_color_modes = {ColorMode.ONOFF} - self._attr_should_poll = False if action.is_dimmable: self._attr_color_mode = ColorMode.BRIGHTNESS self._attr_supported_color_modes = {ColorMode.BRIGHTNESS} - async def async_added_to_hass(self) -> None: - """Subscribe to updates.""" - self.async_on_remove( - self._controller.register_callback( - self._action.id, self.async_update_callback - ) - ) - def turn_on(self, **kwargs: Any) -> None: """Instruct the light to turn on.""" self._action.turn_on(kwargs.get(ATTR_BRIGHTNESS, 255) / 2.55) @@ -126,9 +117,9 @@ class NikoHomeControlLight(LightEntity): """Instruct the light to turn off.""" self._action.turn_off() - async def async_update_callback(self, state: int) -> None: + def update_state(self) -> None: 
"""Handle updates from the controller.""" + state = self._action.state self._attr_is_on = state > 0 if brightness_supported(self.supported_color_modes): self._attr_brightness = round(state * 2.55) - self.async_write_ha_state() From 1e68ae1bb8f0811de4402e65f631dc0e1c989171 Mon Sep 17 00:00:00 2001 From: PierreAronnax Date: Sun, 22 Dec 2024 13:35:36 +0100 Subject: [PATCH 616/677] Remove myself from govee_ble codeowners (#133790) --- CODEOWNERS | 4 ++-- homeassistant/components/govee_ble/manifest.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index 0e2934b1f49..8ab0994cdac 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -578,8 +578,8 @@ build.json @home-assistant/supervisor /tests/components/google_tasks/ @allenporter /homeassistant/components/google_travel_time/ @eifinger /tests/components/google_travel_time/ @eifinger -/homeassistant/components/govee_ble/ @bdraco @PierreAronnax -/tests/components/govee_ble/ @bdraco @PierreAronnax +/homeassistant/components/govee_ble/ @bdraco +/tests/components/govee_ble/ @bdraco /homeassistant/components/govee_light_local/ @Galorhallen /tests/components/govee_light_local/ @Galorhallen /homeassistant/components/gpsd/ @fabaff @jrieger diff --git a/homeassistant/components/govee_ble/manifest.json b/homeassistant/components/govee_ble/manifest.json index d9827e9155c..39a66ad36a7 100644 --- a/homeassistant/components/govee_ble/manifest.json +++ b/homeassistant/components/govee_ble/manifest.json @@ -122,7 +122,7 @@ "connectable": false } ], - "codeowners": ["@bdraco", "@PierreAronnax"], + "codeowners": ["@bdraco"], "config_flow": true, "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/govee_ble", From 075f95b9c4e653f3ae1730a145ed03086fa1a473 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 22 Dec 2024 14:01:31 +0100 Subject: [PATCH 617/677] Add base entity to Peblar Rocksolid EV Chargers integration (#133794) --- 
homeassistant/components/peblar/__init__.py | 23 +------- .../components/peblar/binary_sensor.py | 30 +++------- homeassistant/components/peblar/button.py | 25 ++------- .../components/peblar/coordinator.py | 2 +- .../components/peblar/diagnostics.py | 2 +- homeassistant/components/peblar/entity.py | 55 +++++++++++++++++++ homeassistant/components/peblar/number.py | 22 +++----- homeassistant/components/peblar/select.py | 27 ++------- homeassistant/components/peblar/sensor.py | 31 +++-------- homeassistant/components/peblar/switch.py | 29 +++------- homeassistant/components/peblar/update.py | 31 +++-------- .../peblar/snapshots/test_binary_sensor.ambr | 4 +- .../peblar/snapshots/test_select.ambr | 2 +- .../peblar/snapshots/test_switch.ambr | 2 +- 14 files changed, 111 insertions(+), 174 deletions(-) create mode 100644 homeassistant/components/peblar/entity.py diff --git a/homeassistant/components/peblar/__init__.py b/homeassistant/components/peblar/__init__.py index c185a0e2550..bf1b3ef7e66 100644 --- a/homeassistant/components/peblar/__init__.py +++ b/homeassistant/components/peblar/__init__.py @@ -16,10 +16,8 @@ from peblar import ( from homeassistant.const import CONF_HOST, CONF_PASSWORD, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_create_clientsession -from .const import DOMAIN from .coordinator import ( PeblarConfigEntry, PeblarDataUpdateCoordinator, @@ -76,29 +74,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bo entry.runtime_data = PeblarRuntimeData( data_coordinator=meter_coordinator, system_information=system_information, - user_configuraton_coordinator=user_configuration_coordinator, + user_configuration_coordinator=user_configuration_coordinator, version_coordinator=version_coordinator, ) - # Peblar is a single device 
integration. Setting up the device directly - # during setup. This way we only have to reference it in all entities. - device_registry = dr.async_get(hass) - device_registry.async_get_or_create( - config_entry_id=entry.entry_id, - configuration_url=f"http://{entry.data[CONF_HOST]}", - connections={ - (dr.CONNECTION_NETWORK_MAC, system_information.ethernet_mac_address), - (dr.CONNECTION_NETWORK_MAC, system_information.wlan_mac_address), - }, - identifiers={(DOMAIN, system_information.product_serial_number)}, - manufacturer=system_information.product_vendor_name, - model_id=system_information.product_number, - model=system_information.product_model_name, - name="Peblar EV Charger", - serial_number=system_information.product_serial_number, - sw_version=version_coordinator.data.current.firmware, - ) - # Forward the setup to the platforms await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/peblar/binary_sensor.py b/homeassistant/components/peblar/binary_sensor.py index 5b65a8e976d..e8e5095f050 100644 --- a/homeassistant/components/peblar/binary_sensor.py +++ b/homeassistant/components/peblar/binary_sensor.py @@ -12,12 +12,10 @@ from homeassistant.components.binary_sensor import ( ) from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN from .coordinator import PeblarConfigEntry, PeblarData, PeblarDataUpdateCoordinator +from .entity import PeblarEntity PARALLEL_UPDATES = 0 @@ -56,35 +54,23 @@ async def async_setup_entry( ) -> None: """Set up Peblar binary sensor based on a config entry.""" async_add_entities( - PeblarBinarySensorEntity(entry=entry, description=description) + PeblarBinarySensorEntity( + entry=entry, + 
coordinator=entry.runtime_data.data_coordinator, + description=description, + ) for description in DESCRIPTIONS ) class PeblarBinarySensorEntity( - CoordinatorEntity[PeblarDataUpdateCoordinator], BinarySensorEntity + PeblarEntity[PeblarDataUpdateCoordinator], + BinarySensorEntity, ): """Defines a Peblar binary sensor entity.""" entity_description: PeblarBinarySensorEntityDescription - _attr_has_entity_name = True - - def __init__( - self, - entry: PeblarConfigEntry, - description: PeblarBinarySensorEntityDescription, - ) -> None: - """Initialize the binary sensor entity.""" - super().__init__(entry.runtime_data.data_coordinator) - self.entity_description = description - self._attr_unique_id = f"{entry.unique_id}-{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={ - (DOMAIN, entry.runtime_data.system_information.product_serial_number) - }, - ) - @property def is_on(self) -> bool: """Return state of the binary sensor.""" diff --git a/homeassistant/components/peblar/button.py b/homeassistant/components/peblar/button.py index 68d871c8298..66411daa228 100644 --- a/homeassistant/components/peblar/button.py +++ b/homeassistant/components/peblar/button.py @@ -15,12 +15,10 @@ from homeassistant.components.button import ( ) from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN from .coordinator import PeblarConfigEntry, PeblarUserConfigurationDataUpdateCoordinator +from .entity import PeblarEntity PARALLEL_UPDATES = 1 @@ -59,6 +57,7 @@ async def async_setup_entry( async_add_entities( PeblarButtonEntity( entry=entry, + coordinator=entry.runtime_data.user_configuration_coordinator, description=description, ) for description in DESCRIPTIONS @@ -66,29 +65,13 @@ async def async_setup_entry( 
class PeblarButtonEntity( - CoordinatorEntity[PeblarUserConfigurationDataUpdateCoordinator], ButtonEntity + PeblarEntity[PeblarUserConfigurationDataUpdateCoordinator], + ButtonEntity, ): """Defines an Peblar button.""" entity_description: PeblarButtonEntityDescription - _attr_has_entity_name = True - - def __init__( - self, - entry: PeblarConfigEntry, - description: PeblarButtonEntityDescription, - ) -> None: - """Initialize the button entity.""" - super().__init__(coordinator=entry.runtime_data.user_configuraton_coordinator) - self.entity_description = description - self._attr_unique_id = f"{entry.unique_id}_{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={ - (DOMAIN, entry.runtime_data.system_information.product_serial_number) - }, - ) - async def async_press(self) -> None: """Trigger button press on the Peblar device.""" await self.entity_description.press_fn(self.coordinator.peblar) diff --git a/homeassistant/components/peblar/coordinator.py b/homeassistant/components/peblar/coordinator.py index e2b16e1e62a..4afc544cc1d 100644 --- a/homeassistant/components/peblar/coordinator.py +++ b/homeassistant/components/peblar/coordinator.py @@ -30,7 +30,7 @@ class PeblarRuntimeData: data_coordinator: PeblarDataUpdateCoordinator system_information: PeblarSystemInformation - user_configuraton_coordinator: PeblarUserConfigurationDataUpdateCoordinator + user_configuration_coordinator: PeblarUserConfigurationDataUpdateCoordinator version_coordinator: PeblarVersionDataUpdateCoordinator diff --git a/homeassistant/components/peblar/diagnostics.py b/homeassistant/components/peblar/diagnostics.py index 32716148c3f..a8c7423f79a 100644 --- a/homeassistant/components/peblar/diagnostics.py +++ b/homeassistant/components/peblar/diagnostics.py @@ -15,7 +15,7 @@ async def async_get_config_entry_diagnostics( """Return diagnostics for a config entry.""" return { "system_information": entry.runtime_data.system_information.to_dict(), - "user_configuration": 
entry.runtime_data.user_configuraton_coordinator.data.to_dict(), + "user_configuration": entry.runtime_data.user_configuration_coordinator.data.to_dict(), "ev": entry.runtime_data.data_coordinator.data.ev.to_dict(), "meter": entry.runtime_data.data_coordinator.data.meter.to_dict(), "system": entry.runtime_data.data_coordinator.data.system.to_dict(), diff --git a/homeassistant/components/peblar/entity.py b/homeassistant/components/peblar/entity.py new file mode 100644 index 00000000000..ecfd3e8232b --- /dev/null +++ b/homeassistant/components/peblar/entity.py @@ -0,0 +1,55 @@ +"""Base entity for the Peblar integration.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.const import CONF_HOST +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, +) + +from .const import DOMAIN +from .coordinator import PeblarConfigEntry + + +class PeblarEntity[_DataUpdateCoordinatorT: DataUpdateCoordinator[Any]]( + CoordinatorEntity[_DataUpdateCoordinatorT] +): + """Defines a Peblar entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + *, + entry: PeblarConfigEntry, + coordinator: _DataUpdateCoordinatorT, + description: EntityDescription, + ) -> None: + """Initialize the Peblar entity.""" + super().__init__(coordinator=coordinator) + self.entity_description = description + self._attr_unique_id = f"{entry.unique_id}_{description.key}" + + system_information = entry.runtime_data.system_information + self._attr_device_info = DeviceInfo( + configuration_url=f"http://{entry.data[CONF_HOST]}", + connections={ + (dr.CONNECTION_NETWORK_MAC, system_information.ethernet_mac_address), + (dr.CONNECTION_NETWORK_MAC, system_information.wlan_mac_address), + }, + identifiers={ + (DOMAIN, 
entry.runtime_data.system_information.product_serial_number) + }, + manufacturer=system_information.product_vendor_name, + model=system_information.product_model_name, + model_id=system_information.product_number, + name="Peblar EV Charger", + serial_number=system_information.product_serial_number, + sw_version=entry.runtime_data.version_coordinator.data.current.firmware, + ) diff --git a/homeassistant/components/peblar/number.py b/homeassistant/components/peblar/number.py index 32647199b8b..d17ff09eb94 100644 --- a/homeassistant/components/peblar/number.py +++ b/homeassistant/components/peblar/number.py @@ -15,17 +15,15 @@ from homeassistant.components.number import ( ) from homeassistant.const import EntityCategory, UnitOfElectricCurrent from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN from .coordinator import ( PeblarConfigEntry, PeblarData, PeblarDataUpdateCoordinator, PeblarRuntimeData, ) +from .entity import PeblarEntity PARALLEL_UPDATES = 1 @@ -64,33 +62,29 @@ async def async_setup_entry( async_add_entities( PeblarNumberEntity( entry=entry, + coordinator=entry.runtime_data.data_coordinator, description=description, ) for description in DESCRIPTIONS ) -class PeblarNumberEntity(CoordinatorEntity[PeblarDataUpdateCoordinator], NumberEntity): +class PeblarNumberEntity( + PeblarEntity[PeblarDataUpdateCoordinator], + NumberEntity, +): """Defines a Peblar number.""" entity_description: PeblarNumberEntityDescription - _attr_has_entity_name = True - def __init__( self, entry: PeblarConfigEntry, + coordinator: PeblarDataUpdateCoordinator, description: PeblarNumberEntityDescription, ) -> None: """Initialize the Peblar entity.""" - super().__init__(entry.runtime_data.data_coordinator) - self.entity_description = description - self._attr_unique_id = 
f"{entry.unique_id}_{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={ - (DOMAIN, entry.runtime_data.system_information.product_serial_number) - }, - ) + super().__init__(entry=entry, coordinator=coordinator, description=description) self._attr_native_max_value = description.native_max_value_fn( entry.runtime_data ) diff --git a/homeassistant/components/peblar/select.py b/homeassistant/components/peblar/select.py index d2c74f482c6..e9c7da77bec 100644 --- a/homeassistant/components/peblar/select.py +++ b/homeassistant/components/peblar/select.py @@ -11,12 +11,10 @@ from peblar import Peblar, PeblarUserConfiguration, SmartChargingMode from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN from .coordinator import PeblarConfigEntry, PeblarUserConfigurationDataUpdateCoordinator +from .entity import PeblarEntity PARALLEL_UPDATES = 1 @@ -56,6 +54,7 @@ async def async_setup_entry( async_add_entities( PeblarSelectEntity( entry=entry, + coordinator=entry.runtime_data.user_configuration_coordinator, description=description, ) for description in DESCRIPTIONS @@ -63,29 +62,13 @@ async def async_setup_entry( class PeblarSelectEntity( - CoordinatorEntity[PeblarUserConfigurationDataUpdateCoordinator], SelectEntity + PeblarEntity[PeblarUserConfigurationDataUpdateCoordinator], + SelectEntity, ): - """Defines a peblar select entity.""" + """Defines a Peblar select entity.""" entity_description: PeblarSelectEntityDescription - _attr_has_entity_name = True - - def __init__( - self, - entry: PeblarConfigEntry, - description: PeblarSelectEntityDescription, - ) -> None: - """Initialize the select entity.""" - 
super().__init__(entry.runtime_data.user_configuraton_coordinator) - self.entity_description = description - self._attr_unique_id = f"{entry.unique_id}-{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={ - (DOMAIN, entry.runtime_data.system_information.product_serial_number) - }, - ) - @property def current_option(self) -> str | None: """Return the selected entity option to represent the entity state.""" diff --git a/homeassistant/components/peblar/sensor.py b/homeassistant/components/peblar/sensor.py index df8cac13bcc..e655253d75c 100644 --- a/homeassistant/components/peblar/sensor.py +++ b/homeassistant/components/peblar/sensor.py @@ -22,17 +22,15 @@ from homeassistant.const import ( UnitOfPower, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util.dt import utcnow from .const import ( - DOMAIN, PEBLAR_CHARGE_LIMITER_TO_HOME_ASSISTANT, PEBLAR_CP_STATE_TO_HOME_ASSISTANT, ) from .coordinator import PeblarConfigEntry, PeblarData, PeblarDataUpdateCoordinator +from .entity import PeblarEntity PARALLEL_UPDATES = 0 @@ -237,34 +235,21 @@ async def async_setup_entry( ) -> None: """Set up Peblar sensors based on a config entry.""" async_add_entities( - PeblarSensorEntity(entry, description) + PeblarSensorEntity( + entry=entry, + coordinator=entry.runtime_data.data_coordinator, + description=description, + ) for description in DESCRIPTIONS - if description.has_fn(entry.runtime_data.user_configuraton_coordinator.data) + if description.has_fn(entry.runtime_data.user_configuration_coordinator.data) ) -class PeblarSensorEntity(CoordinatorEntity[PeblarDataUpdateCoordinator], SensorEntity): +class PeblarSensorEntity(PeblarEntity[PeblarDataUpdateCoordinator], SensorEntity): """Defines a Peblar sensor.""" entity_description: 
PeblarSensorDescription - _attr_has_entity_name = True - - def __init__( - self, - entry: PeblarConfigEntry, - description: PeblarSensorDescription, - ) -> None: - """Initialize the Peblar entity.""" - super().__init__(entry.runtime_data.data_coordinator) - self.entity_description = description - self._attr_unique_id = f"{entry.unique_id}_{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={ - (DOMAIN, entry.runtime_data.system_information.product_serial_number) - }, - ) - @property def native_value(self) -> datetime | int | str | None: """Return the state of the sensor.""" diff --git a/homeassistant/components/peblar/switch.py b/homeassistant/components/peblar/switch.py index 3b7ab591508..88f52d01e3a 100644 --- a/homeassistant/components/peblar/switch.py +++ b/homeassistant/components/peblar/switch.py @@ -11,17 +11,15 @@ from peblar import PeblarApi from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN from .coordinator import ( PeblarConfigEntry, PeblarData, PeblarDataUpdateCoordinator, PeblarRuntimeData, ) +from .entity import PeblarEntity PARALLEL_UPDATES = 1 @@ -42,7 +40,7 @@ DESCRIPTIONS = [ entity_category=EntityCategory.CONFIG, has_fn=lambda x: ( x.data_coordinator.data.system.force_single_phase_allowed - and x.user_configuraton_coordinator.data.connected_phases > 1 + and x.user_configuration_coordinator.data.connected_phases > 1 ), is_on_fn=lambda x: x.ev.force_single_phase, set_fn=lambda x, on: x.ev_interface(force_single_phase=on), @@ -59,6 +57,7 @@ async def async_setup_entry( async_add_entities( PeblarSwitchEntity( entry=entry, + coordinator=entry.runtime_data.data_coordinator, 
description=description, ) for description in DESCRIPTIONS @@ -66,28 +65,14 @@ async def async_setup_entry( ) -class PeblarSwitchEntity(CoordinatorEntity[PeblarDataUpdateCoordinator], SwitchEntity): +class PeblarSwitchEntity( + PeblarEntity[PeblarDataUpdateCoordinator], + SwitchEntity, +): """Defines a Peblar switch entity.""" entity_description: PeblarSwitchEntityDescription - _attr_has_entity_name = True - - def __init__( - self, - entry: PeblarConfigEntry, - description: PeblarSwitchEntityDescription, - ) -> None: - """Initialize the select entity.""" - super().__init__(entry.runtime_data.data_coordinator) - self.entity_description = description - self._attr_unique_id = f"{entry.unique_id}-{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={ - (DOMAIN, entry.runtime_data.system_information.product_serial_number) - }, - ) - @property def is_on(self) -> bool: """Return state of the switch.""" diff --git a/homeassistant/components/peblar/update.py b/homeassistant/components/peblar/update.py index 37f20722b98..67ce30a89a6 100644 --- a/homeassistant/components/peblar/update.py +++ b/homeassistant/components/peblar/update.py @@ -11,16 +11,14 @@ from homeassistant.components.update import ( UpdateEntityDescription, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN from .coordinator import ( PeblarConfigEntry, PeblarVersionDataUpdateCoordinator, PeblarVersionInformation, ) +from .entity import PeblarEntity PARALLEL_UPDATES = 1 @@ -56,34 +54,23 @@ async def async_setup_entry( ) -> None: """Set up Peblar update based on a config entry.""" async_add_entities( - PeblarUpdateEntity(entry, description) for description in DESCRIPTIONS + PeblarUpdateEntity( + entry=entry, + coordinator=entry.runtime_data.version_coordinator, + 
description=description, + ) + for description in DESCRIPTIONS ) class PeblarUpdateEntity( - CoordinatorEntity[PeblarVersionDataUpdateCoordinator], UpdateEntity + PeblarEntity[PeblarVersionDataUpdateCoordinator], + UpdateEntity, ): """Defines a Peblar update entity.""" entity_description: PeblarUpdateEntityDescription - _attr_has_entity_name = True - - def __init__( - self, - entry: PeblarConfigEntry, - description: PeblarUpdateEntityDescription, - ) -> None: - """Initialize the update entity.""" - super().__init__(entry.runtime_data.version_coordinator) - self.entity_description = description - self._attr_unique_id = f"{entry.unique_id}_{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={ - (DOMAIN, entry.runtime_data.system_information.product_serial_number) - }, - ) - @property def installed_version(self) -> str | None: """Version currently installed and in use.""" diff --git a/tests/components/peblar/snapshots/test_binary_sensor.ambr b/tests/components/peblar/snapshots/test_binary_sensor.ambr index 5dd008dd320..72c3ac78a12 100644 --- a/tests/components/peblar/snapshots/test_binary_sensor.ambr +++ b/tests/components/peblar/snapshots/test_binary_sensor.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_error_codes', - 'unique_id': '23-45-A4O-MOF-active_error_codes', + 'unique_id': '23-45-A4O-MOF_active_error_codes', 'unit_of_measurement': None, }) # --- @@ -75,7 +75,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_warning_codes', - 'unique_id': '23-45-A4O-MOF-active_warning_codes', + 'unique_id': '23-45-A4O-MOF_active_warning_codes', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/peblar/snapshots/test_select.ambr b/tests/components/peblar/snapshots/test_select.ambr index 9f0852d7cf4..62e09325601 100644 --- a/tests/components/peblar/snapshots/test_select.ambr +++ b/tests/components/peblar/snapshots/test_select.ambr @@ -36,7 +36,7 @@ 
'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'smart_charging', - 'unique_id': '23-45-A4O-MOF-smart_charging', + 'unique_id': '23-45-A4O-MOF_smart_charging', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/peblar/snapshots/test_switch.ambr b/tests/components/peblar/snapshots/test_switch.ambr index f4fc768030f..53829278593 100644 --- a/tests/components/peblar/snapshots/test_switch.ambr +++ b/tests/components/peblar/snapshots/test_switch.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'force_single_phase', - 'unique_id': '23-45-A4O-MOF-force_single_phase', + 'unique_id': '23-45-A4O-MOF_force_single_phase', 'unit_of_measurement': None, }) # --- From 959f20c523705f71bf72640bd2987c03f03a64bb Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 22 Dec 2024 14:23:12 +0100 Subject: [PATCH 618/677] Add reconfigure flow to Peblar Rocksolid EV Chargers integration (#133785) --- .../components/peblar/config_flow.py | 51 ++++++++ .../components/peblar/quality_scale.yaml | 2 +- homeassistant/components/peblar/strings.json | 15 ++- tests/components/peblar/test_config_flow.py | 109 ++++++++++++++++++ 4 files changed, 175 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/peblar/config_flow.py b/homeassistant/components/peblar/config_flow.py index 809cb13746e..29bf456b7ea 100644 --- a/homeassistant/components/peblar/config_flow.py +++ b/homeassistant/components/peblar/config_flow.py @@ -76,6 +76,57 @@ class PeblarFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of a Peblar device.""" + errors = {} + reconfigure_entry = self._get_reconfigure_entry() + + if user_input is not None: + peblar = Peblar( + host=user_input[CONF_HOST], + session=async_create_clientsession( + self.hass, cookie_jar=CookieJar(unsafe=True) + ), + ) + try: + 
await peblar.login(password=user_input[CONF_PASSWORD]) + info = await peblar.system_information() + except PeblarAuthenticationError: + errors[CONF_PASSWORD] = "invalid_auth" + except PeblarConnectionError: + errors[CONF_HOST] = "cannot_connect" + except Exception: # noqa: BLE001 + LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + await self.async_set_unique_id(info.product_serial_number) + self._abort_if_unique_id_mismatch(reason="different_device") + return self.async_update_reload_and_abort( + reconfigure_entry, + data_updates=user_input, + ) + + host = reconfigure_entry.data[CONF_HOST] + if user_input is not None: + host = user_input[CONF_HOST] + + return self.async_show_form( + step_id="reconfigure", + data_schema=vol.Schema( + { + vol.Required(CONF_HOST, default=host): TextSelector( + TextSelectorConfig(autocomplete="off") + ), + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig(type=TextSelectorType.PASSWORD) + ), + } + ), + errors=errors, + ) + async def async_step_zeroconf( self, discovery_info: zeroconf.ZeroconfServiceInfo ) -> ConfigFlowResult: diff --git a/homeassistant/components/peblar/quality_scale.yaml b/homeassistant/components/peblar/quality_scale.yaml index aea83da25ab..9de0031373f 100644 --- a/homeassistant/components/peblar/quality_scale.yaml +++ b/homeassistant/components/peblar/quality_scale.yaml @@ -66,7 +66,7 @@ rules: comment: | The coordinator needs translation when the update failed. 
icon-translations: done - reconfiguration-flow: todo + reconfiguration-flow: done repair-issues: status: exempt comment: | diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index 0cce7ed8191..f09a156dd1e 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -2,8 +2,10 @@ "config": { "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "different_device": "The information entered is from a different Peblar EV charger.", "no_serial_number": "The discovered Peblar device did not provide a serial number.", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", @@ -20,6 +22,17 @@ }, "description": "Reauthenticate with your Peblar RV charger.\n\nTo do so, you will need to enter your new password you use to log into Peblar's device web interface." }, + "reconfigure": { + "data": { + "host": "[%key:common::config_flow::data::host%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "[%key:component::peblar::config::step::user::data_description::host%]", + "password": "[%key:component::peblar::config::step::user::data_description::password%]" + }, + "description": "Reconfigure your Peblar EV charger.\n\nThis allows you to change the IP address of your Peblar charger and the password you use to log into the Peblar device' web interface." 
+ }, "user": { "data": { "host": "[%key:common::config_flow::data::host%]", diff --git a/tests/components/peblar/test_config_flow.py b/tests/components/peblar/test_config_flow.py index a4a461b6bba..a97e8d3b564 100644 --- a/tests/components/peblar/test_config_flow.py +++ b/tests/components/peblar/test_config_flow.py @@ -117,6 +117,115 @@ async def test_user_flow_already_configured( assert result["reason"] == "already_configured" +@pytest.mark.usefixtures("mock_peblar") +async def test_reconfigure_flow( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test the full happy path reconfigure flow from start to finish.""" + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + assert mock_config_entry.data == { + CONF_HOST: "127.0.0.127", + CONF_PASSWORD: "OMGSPIDERS", + } + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGPUPPIES", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + assert mock_config_entry.data == { + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGPUPPIES", + } + + +@pytest.mark.usefixtures("mock_peblar") +async def test_reconfigure_to_different_device( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfiguring to a different device doesn't work.""" + mock_config_entry.add_to_hass(hass) + + # Change the unique ID of the entry, so we have a mismatch + hass.config_entries.async_update_entry(mock_config_entry, unique_id="mismatch") + + result = await mock_config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGPUPPIES", + }, 
+ ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "different_device" + + +@pytest.mark.parametrize( + ("side_effect", "expected_error"), + [ + (PeblarConnectionError, {CONF_HOST: "cannot_connect"}), + (PeblarAuthenticationError, {CONF_PASSWORD: "invalid_auth"}), + (Exception, {"base": "unknown"}), + ], +) +async def test_reconfigure_flow_errors( + hass: HomeAssistant, + mock_peblar: MagicMock, + mock_config_entry: MockConfigEntry, + side_effect: Exception, + expected_error: dict[str, str], +) -> None: + """Test we show user form on a connection error.""" + mock_config_entry.add_to_hass(hass) + mock_peblar.login.side_effect = side_effect + + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "127.0.0.1", + CONF_PASSWORD: "OMGPUPPIES", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == expected_error + + mock_peblar.login.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "127.0.0.2", + CONF_PASSWORD: "OMGPUPPIES", + }, + ) + assert result["type"] is FlowResultType.ABORT + + assert mock_config_entry.data == { + CONF_HOST: "127.0.0.2", + CONF_PASSWORD: "OMGPUPPIES", + } + + @pytest.mark.usefixtures("mock_peblar") async def test_zeroconf_flow(hass: HomeAssistant) -> None: """Test the zeroconf happy flow from start to finish.""" From 26d5c55d110c3675faf1497836acec0ed236b06e Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 22 Dec 2024 15:35:45 +0100 Subject: [PATCH 619/677] Add button error handling for Peblar Rocksolid EV Chargers (#133802) --- homeassistant/components/peblar/button.py | 2 + homeassistant/components/peblar/helpers.py | 55 ++++++++ homeassistant/components/peblar/strings.json | 11 ++ tests/components/peblar/test_button.py | 125 
++++++++++++++++++- 4 files changed, 189 insertions(+), 4 deletions(-) create mode 100644 homeassistant/components/peblar/helpers.py diff --git a/homeassistant/components/peblar/button.py b/homeassistant/components/peblar/button.py index 66411daa228..22150c82649 100644 --- a/homeassistant/components/peblar/button.py +++ b/homeassistant/components/peblar/button.py @@ -19,6 +19,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .coordinator import PeblarConfigEntry, PeblarUserConfigurationDataUpdateCoordinator from .entity import PeblarEntity +from .helpers import peblar_exception_handler PARALLEL_UPDATES = 1 @@ -72,6 +73,7 @@ class PeblarButtonEntity( entity_description: PeblarButtonEntityDescription + @peblar_exception_handler async def async_press(self) -> None: """Trigger button press on the Peblar device.""" await self.entity_description.press_fn(self.coordinator.peblar) diff --git a/homeassistant/components/peblar/helpers.py b/homeassistant/components/peblar/helpers.py new file mode 100644 index 00000000000..cc1eb228803 --- /dev/null +++ b/homeassistant/components/peblar/helpers.py @@ -0,0 +1,55 @@ +"""Helpers for Peblar.""" + +from __future__ import annotations + +from collections.abc import Callable, Coroutine +from typing import Any, Concatenate + +from peblar import PeblarAuthenticationError, PeblarConnectionError, PeblarError + +from homeassistant.exceptions import HomeAssistantError + +from .const import DOMAIN +from .entity import PeblarEntity + + +def peblar_exception_handler[_PeblarEntityT: PeblarEntity, **_P]( + func: Callable[Concatenate[_PeblarEntityT, _P], Coroutine[Any, Any, Any]], +) -> Callable[Concatenate[_PeblarEntityT, _P], Coroutine[Any, Any, None]]: + """Decorate Peblar calls to handle exceptions. + + A decorator that wraps the passed in function, catches Peblar errors. 
+ """ + + async def handler( + self: _PeblarEntityT, *args: _P.args, **kwargs: _P.kwargs + ) -> None: + try: + await func(self, *args, **kwargs) + self.coordinator.async_update_listeners() + + except PeblarAuthenticationError as error: + # Reload the config entry to trigger reauth flow + self.hass.config_entries.async_schedule_reload( + self.coordinator.config_entry.entry_id + ) + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="authentication_error", + ) from error + + except PeblarConnectionError as error: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="communication_error", + translation_placeholders={"error": str(error)}, + ) from error + + except PeblarError as error: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="unknown_error", + translation_placeholders={"error": str(error)}, + ) from error + + return handler diff --git a/homeassistant/components/peblar/strings.json b/homeassistant/components/peblar/strings.json index f09a156dd1e..a6fa3acf457 100644 --- a/homeassistant/components/peblar/strings.json +++ b/homeassistant/components/peblar/strings.json @@ -161,5 +161,16 @@ "name": "Customization" } } + }, + "exceptions": { + "authentication_error": { + "message": "An authentication failure occurred while communicating with the Peblar device." 
+ }, + "communication_error": { + "message": "An error occurred while communicating with the Peblar device: {error}" + }, + "unknown_error": { + "message": "An unknown error occurred while communicating with the Peblar device: {error}" + } } } diff --git a/tests/components/peblar/test_button.py b/tests/components/peblar/test_button.py index 7b271d3747a..e9ab377db67 100644 --- a/tests/components/peblar/test_button.py +++ b/tests/components/peblar/test_button.py @@ -1,19 +1,29 @@ """Tests for the Peblar button platform.""" +from unittest.mock import MagicMock + +from peblar import PeblarAuthenticationError, PeblarConnectionError, PeblarError import pytest from syrupy.assertion import SnapshotAssertion +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.components.peblar.const import DOMAIN -from homeassistant.const import Platform +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import MockConfigEntry, snapshot_platform +pytestmark = [ + pytest.mark.freeze_time("2024-12-21 21:45:00"), + pytest.mark.parametrize("init_integration", [Platform.BUTTON], indirect=True), + pytest.mark.usefixtures("init_integration"), +] -@pytest.mark.freeze_time("2024-12-21 21:45:00") -@pytest.mark.parametrize("init_integration", [Platform.BUTTON], indirect=True) -@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration") + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_entities( hass: HomeAssistant, snapshot: SnapshotAssertion, @@ -34,3 +44,110 @@ async def test_entities( ) for entity_entry in entity_entries: assert entity_entry.device_id == device_entry.id + + +@pytest.mark.parametrize( + ("entity_id", 
"method"), + [ + ("button.peblar_ev_charger_identify", "identify"), + ("button.peblar_ev_charger_restart", "reboot"), + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_buttons( + hass: HomeAssistant, + mock_peblar: MagicMock, + mock_config_entry: MockConfigEntry, + entity_id: str, + method: str, +) -> None: + """Test the Peblar EV charger buttons.""" + mocked_method = getattr(mock_peblar, method) + + # Test normal happy path button press + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert len(mocked_method.mock_calls) == 1 + mocked_method.assert_called_with() + + # Test connection error handling + mocked_method.side_effect = PeblarConnectionError("Could not connect") + with pytest.raises( + HomeAssistantError, + match=( + r"An error occurred while communicating " + r"with the Peblar device: Could not connect" + ), + ) as excinfo: + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert excinfo.value.translation_domain == DOMAIN + assert excinfo.value.translation_key == "communication_error" + assert excinfo.value.translation_placeholders == {"error": "Could not connect"} + + # Test unknown error handling + mocked_method.side_effect = PeblarError("Unknown error") + with pytest.raises( + HomeAssistantError, + match=( + r"An unknown error occurred while communicating " + r"with the Peblar device: Unknown error" + ), + ) as excinfo: + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert excinfo.value.translation_domain == DOMAIN + assert excinfo.value.translation_key == "unknown_error" + assert excinfo.value.translation_placeholders == {"error": "Unknown error"} + + # Test authentication error handling + mocked_method.side_effect = PeblarAuthenticationError("Authentication error") + 
mock_peblar.login.side_effect = PeblarAuthenticationError("Authentication error") + with pytest.raises( + HomeAssistantError, + match=( + r"An authentication failure occurred while communicating " + r"with the Peblar device" + ), + ) as excinfo: + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert excinfo.value.translation_domain == DOMAIN + assert excinfo.value.translation_key == "authentication_error" + assert not excinfo.value.translation_placeholders + + # Ensure the device is reloaded on authentication error and triggers + # a reauthentication flow. + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow["step_id"] == "reauth_confirm" + assert flow["handler"] == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == mock_config_entry.entry_id From 3cc75c3cf62958488cdb64f4c23d659fa01c0e2a Mon Sep 17 00:00:00 2001 From: "Steven B." 
<51370195+sdb9696@users.noreply.github.com> Date: Sun, 22 Dec 2024 15:17:23 +0000 Subject: [PATCH 620/677] Use feature checks in tplink integration (#133795) Clean up to use new upstream API: * Use Feature attributes to check for supported * Use color_temp range and update tests --- homeassistant/components/tplink/light.py | 28 +++-- tests/components/tplink/__init__.py | 30 +++-- .../components/tplink/fixtures/features.json | 4 +- .../tplink/snapshots/test_climate.ambr | 8 +- .../tplink/snapshots/test_number.ambr | 16 +-- tests/components/tplink/test_init.py | 19 ++- tests/components/tplink/test_light.py | 117 ++++++++++++++---- tests/components/tplink/test_sensor.py | 2 +- 8 files changed, 157 insertions(+), 67 deletions(-) diff --git a/homeassistant/components/tplink/light.py b/homeassistant/components/tplink/light.py index 8d6ec27f81c..f3207d754f3 100644 --- a/homeassistant/components/tplink/light.py +++ b/homeassistant/components/tplink/light.py @@ -200,14 +200,13 @@ class TPLinkLightEntity(CoordinatedTPLinkEntity, LightEntity): # If _attr_name is None the entity name will be the device name self._attr_name = None if parent is None else device.alias modes: set[ColorMode] = {ColorMode.ONOFF} - if light_module.is_variable_color_temp: + if color_temp_feat := light_module.get_feature("color_temp"): modes.add(ColorMode.COLOR_TEMP) - temp_range = light_module.valid_temperature_range - self._attr_min_color_temp_kelvin = temp_range.min - self._attr_max_color_temp_kelvin = temp_range.max - if light_module.is_color: + self._attr_min_color_temp_kelvin = color_temp_feat.minimum_value + self._attr_max_color_temp_kelvin = color_temp_feat.maximum_value + if light_module.has_feature("hsv"): modes.add(ColorMode.HS) - if light_module.is_dimmable: + if light_module.has_feature("brightness"): modes.add(ColorMode.BRIGHTNESS) self._attr_supported_color_modes = filter_supported_color_modes(modes) if len(self._attr_supported_color_modes) == 1: @@ -270,15 +269,17 @@ class 
TPLinkLightEntity(CoordinatedTPLinkEntity, LightEntity): self, color_temp: float, brightness: int | None, transition: int | None ) -> None: light_module = self._light_module - valid_temperature_range = light_module.valid_temperature_range + color_temp_feat = light_module.get_feature("color_temp") + assert color_temp_feat + requested_color_temp = round(color_temp) # Clamp color temp to valid range # since if the light in a group we will # get requests for color temps for the range # of the group and not the light clamped_color_temp = min( - valid_temperature_range.max, - max(valid_temperature_range.min, requested_color_temp), + color_temp_feat.maximum_value, + max(color_temp_feat.minimum_value, requested_color_temp), ) await light_module.set_color_temp( clamped_color_temp, @@ -325,8 +326,11 @@ class TPLinkLightEntity(CoordinatedTPLinkEntity, LightEntity): # The light supports only a single color mode, return it return self._fixed_color_mode - # The light supports both color temp and color, determine which on is active - if self._light_module.is_variable_color_temp and self._light_module.color_temp: + # The light supports both color temp and color, determine which one is active + if ( + self._light_module.has_feature("color_temp") + and self._light_module.color_temp + ): return ColorMode.COLOR_TEMP return ColorMode.HS @@ -335,7 +339,7 @@ class TPLinkLightEntity(CoordinatedTPLinkEntity, LightEntity): """Update the entity's attributes.""" light_module = self._light_module self._attr_is_on = light_module.state.light_on is True - if light_module.is_dimmable: + if light_module.has_feature("brightness"): self._attr_brightness = round((light_module.brightness * 255.0) / 100.0) color_mode = self._determine_color_mode() self._attr_color_mode = color_mode diff --git a/tests/components/tplink/__init__.py b/tests/components/tplink/__init__.py index 809ab3bfd78..fdef5c35bfa 100644 --- a/tests/components/tplink/__init__.py +++ b/tests/components/tplink/__init__.py @@ -257,20 
+257,27 @@ def _mocked_device( for module_name in modules } + device_features = {} if features: - device.features = { + device_features = { feature_id: _mocked_feature(feature_id, require_fixture=True) for feature_id in features if isinstance(feature_id, str) } - device.features.update( + device_features.update( { feature.id: feature for feature in features if isinstance(feature, Feature) } ) + device.features = device_features + + for mod in device.modules.values(): + mod.get_feature.side_effect = device_features.get + mod.has_feature.side_effect = lambda id: id in device_features + device.children = [] if children: for child in children: @@ -289,6 +296,7 @@ def _mocked_device( device.protocol = _mock_protocol() device.config = device_config device.credentials_hash = credentials_hash + return device @@ -303,8 +311,8 @@ def _mocked_feature( precision_hint=None, choices=None, unit=None, - minimum_value=0, - maximum_value=2**16, # Arbitrary max + minimum_value=None, + maximum_value=None, ) -> Feature: """Get a mocked feature. 
@@ -334,11 +342,14 @@ def _mocked_feature( feature.unit = unit or fixture.get("unit") # number - feature.minimum_value = minimum_value or fixture.get("minimum_value") - feature.maximum_value = maximum_value or fixture.get("maximum_value") + min_val = minimum_value or fixture.get("minimum_value") + feature.minimum_value = 0 if min_val is None else min_val + max_val = maximum_value or fixture.get("maximum_value") + feature.maximum_value = 2**16 if max_val is None else max_val # select feature.choices = choices or fixture.get("choices") + return feature @@ -350,13 +361,7 @@ def _mocked_light_module(device) -> Light: light.state = LightState( light_on=True, brightness=light.brightness, color_temp=light.color_temp ) - light.is_color = True - light.is_variable_color_temp = True - light.is_dimmable = True - light.is_brightness = True - light.has_effects = False light.hsv = (10, 30, 5) - light.valid_temperature_range = ColorTempRange(min=4000, max=9000) light.hw_info = {"sw_ver": "1.0.0", "hw_ver": "1.0.0"} async def _set_state(state, *_, **__): @@ -389,7 +394,6 @@ def _mocked_light_module(device) -> Light: def _mocked_light_effect_module(device) -> LightEffect: effect = MagicMock(spec=LightEffect, name="Mocked light effect") - effect.has_effects = True effect.has_custom_effects = True effect.effect = "Effect1" effect.effect_list = ["Off", "Effect1", "Effect2"] diff --git a/tests/components/tplink/fixtures/features.json b/tests/components/tplink/fixtures/features.json index f60132fd2c2..d822bfc9b57 100644 --- a/tests/components/tplink/fixtures/features.json +++ b/tests/components/tplink/fixtures/features.json @@ -267,7 +267,9 @@ "target_temperature": { "value": false, "type": "Number", - "category": "Primary" + "category": "Primary", + "minimum_value": 5, + "maximum_value": 30 }, "fan_speed_level": { "value": 2, diff --git a/tests/components/tplink/snapshots/test_climate.ambr b/tests/components/tplink/snapshots/test_climate.ambr index 8236f332046..6823c373b68 100644 --- 
a/tests/components/tplink/snapshots/test_climate.ambr +++ b/tests/components/tplink/snapshots/test_climate.ambr @@ -9,8 +9,8 @@ , , ]), - 'max_temp': 65536, - 'min_temp': None, + 'max_temp': 30, + 'min_temp': 5, }), 'config_entry_id': , 'device_class': None, @@ -49,8 +49,8 @@ , , ]), - 'max_temp': 65536, - 'min_temp': None, + 'max_temp': 30, + 'min_temp': 5, 'supported_features': , 'temperature': 22.2, }), diff --git a/tests/components/tplink/snapshots/test_number.ambr b/tests/components/tplink/snapshots/test_number.ambr index 977d2098fb9..dbb58bac01b 100644 --- a/tests/components/tplink/snapshots/test_number.ambr +++ b/tests/components/tplink/snapshots/test_number.ambr @@ -41,7 +41,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'max': 65536, + 'max': 60, 'min': 0, 'mode': , 'step': 1.0, @@ -77,7 +77,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'my_device Smooth off', - 'max': 65536, + 'max': 60, 'min': 0, 'mode': , 'step': 1.0, @@ -96,7 +96,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'max': 65536, + 'max': 60, 'min': 0, 'mode': , 'step': 1.0, @@ -132,7 +132,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'my_device Smooth on', - 'max': 65536, + 'max': 60, 'min': 0, 'mode': , 'step': 1.0, @@ -151,7 +151,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'max': 65536, + 'max': 10, 'min': -10, 'mode': , 'step': 1.0, @@ -187,7 +187,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'my_device Temperature offset', - 'max': 65536, + 'max': 10, 'min': -10, 'mode': , 'step': 1.0, @@ -206,7 +206,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'max': 65536, + 'max': 60, 'min': 0, 'mode': , 'step': 1.0, @@ -242,7 +242,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'my_device Turn off in', - 'max': 65536, + 'max': 60, 'min': 0, 'mode': , 'step': 1.0, diff --git a/tests/components/tplink/test_init.py b/tests/components/tplink/test_init.py index 766e6784c8b..dd967e0e0d6 100644 --- 
a/tests/components/tplink/test_init.py +++ b/tests/components/tplink/test_init.py @@ -54,6 +54,7 @@ from . import ( MAC_ADDRESS, MODEL, _mocked_device, + _mocked_feature, _patch_connect, _patch_discovery, _patch_single_discovery, @@ -335,7 +336,14 @@ async def test_update_attrs_fails_in_init( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) config_entry.add_to_hass(hass) - light = _mocked_device(modules=[Module.Light], alias="my_light") + features = [ + _mocked_feature("brightness", value=50), + _mocked_feature("hsv", value=(10, 30, 5)), + _mocked_feature( + "color_temp", value=4000, minimum_value=4000, maximum_value=9000 + ), + ] + light = _mocked_device(modules=[Module.Light], alias="my_light", features=features) light_module = light.modules[Module.Light] p = PropertyMock(side_effect=KasaException) type(light_module).color_temp = p @@ -363,7 +371,14 @@ async def test_update_attrs_fails_on_update( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) config_entry.add_to_hass(hass) - light = _mocked_device(modules=[Module.Light], alias="my_light") + features = [ + _mocked_feature("brightness", value=50), + _mocked_feature("hsv", value=(10, 30, 5)), + _mocked_feature( + "color_temp", value=4000, minimum_value=4000, maximum_value=9000 + ), + ] + light = _mocked_device(modules=[Module.Light], alias="my_light", features=features) light_module = light.modules[Module.Light] with _patch_discovery(device=light), _patch_connect(device=light): diff --git a/tests/components/tplink/test_light.py b/tests/components/tplink/test_light.py index b7f4ed6b8f4..6549711b7fc 100644 --- a/tests/components/tplink/test_light.py +++ b/tests/components/tplink/test_light.py @@ -54,6 +54,7 @@ from . 
import ( DEVICE_ID, MAC_ADDRESS, _mocked_device, + _mocked_feature, _patch_connect, _patch_discovery, _patch_single_discovery, @@ -118,8 +119,32 @@ async def test_legacy_dimmer_unique_id(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("device", "transition"), [ - (_mocked_device(modules=[Module.Light]), 2.0), - (_mocked_device(modules=[Module.Light, Module.LightEffect]), None), + ( + _mocked_device( + modules=[Module.Light], + features=[ + _mocked_feature("brightness", value=50), + _mocked_feature("hsv", value=(10, 30, 5)), + _mocked_feature( + "color_temp", value=4000, minimum_value=4000, maximum_value=9000 + ), + ], + ), + 2.0, + ), + ( + _mocked_device( + modules=[Module.Light, Module.LightEffect], + features=[ + _mocked_feature("brightness", value=50), + _mocked_feature("hsv", value=(10, 30, 5)), + _mocked_feature( + "color_temp", value=4000, minimum_value=4000, maximum_value=9000 + ), + ], + ), + None, + ), ], ) async def test_color_light( @@ -131,7 +156,10 @@ async def test_color_light( ) already_migrated_config_entry.add_to_hass(hass) light = device.modules[Module.Light] + + # Setting color_temp to None emulates a device with active effects light.color_temp = None + with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() @@ -220,9 +248,14 @@ async def test_color_light_no_temp(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - device = _mocked_device(modules=[Module.Light], alias="my_light") + features = [ + _mocked_feature("brightness", value=50), + _mocked_feature("hsv", value=(10, 30, 5)), + ] + + device = _mocked_device(modules=[Module.Light], alias="my_light", features=features) light = device.modules[Module.Light] - light.is_variable_color_temp = False + type(light).color_temp = PropertyMock(side_effect=Exception) with 
_patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) @@ -272,25 +305,47 @@ async def test_color_light_no_temp(hass: HomeAssistant) -> None: @pytest.mark.parametrize( - ("bulb", "is_color"), + ("device", "is_color"), [ - (_mocked_device(modules=[Module.Light], alias="my_light"), True), - (_mocked_device(modules=[Module.Light], alias="my_light"), False), + ( + _mocked_device( + modules=[Module.Light], + alias="my_light", + features=[ + _mocked_feature("brightness", value=50), + _mocked_feature("hsv", value=(10, 30, 5)), + _mocked_feature( + "color_temp", value=4000, minimum_value=4000, maximum_value=9000 + ), + ], + ), + True, + ), + ( + _mocked_device( + modules=[Module.Light], + alias="my_light", + features=[ + _mocked_feature("brightness", value=50), + _mocked_feature( + "color_temp", value=4000, minimum_value=4000, maximum_value=9000 + ), + ], + ), + False, + ), ], ) async def test_color_temp_light( - hass: HomeAssistant, bulb: MagicMock, is_color: bool + hass: HomeAssistant, device: MagicMock, is_color: bool ) -> None: """Test a light.""" already_migrated_config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - device = _mocked_device(modules=[Module.Light], alias="my_light") + # device = _mocked_device(modules=[Module.Light], alias="my_light") light = device.modules[Module.Light] - light.is_color = is_color - light.color_temp = 4000 - light.is_variable_color_temp = True with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) @@ -303,7 +358,7 @@ async def test_color_temp_light( attributes = state.attributes assert attributes[ATTR_BRIGHTNESS] == 128 assert attributes[ATTR_COLOR_MODE] == "color_temp" - if light.is_color: + if is_color: assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "hs"] else: 
assert attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp"] @@ -368,10 +423,11 @@ async def test_brightness_only_light(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - device = _mocked_device(modules=[Module.Light], alias="my_light") + features = [ + _mocked_feature("brightness", value=50), + ] + device = _mocked_device(modules=[Module.Light], alias="my_light", features=features) light = device.modules[Module.Light] - light.is_color = False - light.is_variable_color_temp = False with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) @@ -414,11 +470,8 @@ async def test_on_off_light(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - device = _mocked_device(modules=[Module.Light], alias="my_light") + device = _mocked_device(modules=[Module.Light], alias="my_light", features=[]) light = device.modules[Module.Light] - light.is_color = False - light.is_variable_color_temp = False - light.is_dimmable = False with _patch_discovery(device=device), _patch_connect(device=device): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) @@ -450,11 +503,9 @@ async def test_off_at_start_light(hass: HomeAssistant) -> None: domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) - device = _mocked_device(modules=[Module.Light], alias="my_light") + device = _mocked_device(modules=[Module.Light], alias="my_light", features=[]) light = device.modules[Module.Light] - light.is_color = False - light.is_variable_color_temp = False - light.is_dimmable = False + light.state = LightState(light_on=False) with _patch_discovery(device=device), _patch_connect(device=device): @@ -513,8 +564,15 @@ async def test_smart_strip_effects( 
domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) + features = [ + _mocked_feature("brightness", value=50), + _mocked_feature("hsv", value=(10, 30, 5)), + _mocked_feature( + "color_temp", value=4000, minimum_value=4000, maximum_value=9000 + ), + ] device = _mocked_device( - modules=[Module.Light, Module.LightEffect], alias="my_light" + modules=[Module.Light, Module.LightEffect], alias="my_light", features=features ) light = device.modules[Module.Light] light_effect = device.modules[Module.LightEffect] @@ -977,8 +1035,15 @@ async def test_scene_effect_light( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS ) already_migrated_config_entry.add_to_hass(hass) + features = [ + _mocked_feature("brightness", value=50), + _mocked_feature("hsv", value=(10, 30, 5)), + _mocked_feature( + "color_temp", value=4000, minimum_value=4000, maximum_value=9000 + ), + ] device = _mocked_device( - modules=[Module.Light, Module.LightEffect], alias="my_light" + modules=[Module.Light, Module.LightEffect], alias="my_light", features=features ) light_effect = device.modules[Module.LightEffect] light_effect.effect = LightEffect.LIGHT_EFFECTS_OFF diff --git a/tests/components/tplink/test_sensor.py b/tests/components/tplink/test_sensor.py index dda43c52430..a53b59df0dc 100644 --- a/tests/components/tplink/test_sensor.py +++ b/tests/components/tplink/test_sensor.py @@ -129,7 +129,7 @@ async def test_color_light_no_emeter(hass: HomeAssistant) -> None: ) already_migrated_config_entry.add_to_hass(hass) bulb = _mocked_device(alias="my_bulb", modules=[Module.Light]) - bulb.has_emeter = False + with _patch_discovery(device=bulb), _patch_connect(device=bulb): await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) await hass.async_block_till_done() From 484f149e617bad189aa4774160573165a2868acc Mon Sep 17 00:00:00 2001 From: Dave T <17680170+davet2001@users.noreply.github.com> Date: Sun, 22 Dec 2024 
16:31:03 +0000 Subject: [PATCH 621/677] Add config flow stream preview to generic camera (#122563) Co-authored-by: Allen Porter --- homeassistant/components/generic/camera.py | 5 +- .../components/generic/config_flow.py | 172 +++++++++++++----- .../components/generic/manifest.json | 2 +- homeassistant/components/generic/strings.json | 15 +- tests/components/generic/conftest.py | 8 +- tests/components/generic/test_config_flow.py | 134 ++++++++++---- 6 files changed, 243 insertions(+), 93 deletions(-) diff --git a/homeassistant/components/generic/camera.py b/homeassistant/components/generic/camera.py index 3aac5145ca5..edefbc55ca6 100644 --- a/homeassistant/components/generic/camera.py +++ b/homeassistant/components/generic/camera.py @@ -96,10 +96,9 @@ class GenericCamera(Camera): self._stream_source = device_info.get(CONF_STREAM_SOURCE) if self._stream_source: self._stream_source = Template(self._stream_source, hass) - self._limit_refetch = device_info[CONF_LIMIT_REFETCH_TO_URL_CHANGE] - self._attr_frame_interval = 1 / device_info[CONF_FRAMERATE] - if self._stream_source: self._attr_supported_features = CameraEntityFeature.STREAM + self._limit_refetch = device_info.get(CONF_LIMIT_REFETCH_TO_URL_CHANGE, False) + self._attr_frame_interval = 1 / device_info[CONF_FRAMERATE] self.content_type = device_info[CONF_CONTENT_TYPE] self.verify_ssl = device_info[CONF_VERIFY_SSL] if device_info.get(CONF_RTSP_TRANSPORT): diff --git a/homeassistant/components/generic/config_flow.py b/homeassistant/components/generic/config_flow.py index 84243101bd6..83894b489f0 100644 --- a/homeassistant/components/generic/config_flow.py +++ b/homeassistant/components/generic/config_flow.py @@ -5,7 +5,7 @@ from __future__ import annotations import asyncio from collections.abc import Mapping import contextlib -from datetime import datetime +from datetime import datetime, timedelta from errno import EHOSTUNREACH, EIO import io import logging @@ -17,18 +17,21 @@ import PIL.Image import voluptuous as 
vol import yarl +from homeassistant.components import websocket_api from homeassistant.components.camera import ( CAMERA_IMAGE_TIMEOUT, + DOMAIN as CAMERA_DOMAIN, DynamicStreamSettings, _async_get_image, ) -from homeassistant.components.http import HomeAssistantView +from homeassistant.components.http.view import HomeAssistantView from homeassistant.components.stream import ( CONF_RTSP_TRANSPORT, CONF_USE_WALLCLOCK_AS_TIMESTAMPS, HLS_PROVIDER, RTSP_TRANSPORTS, SOURCE_TIMEOUT, + Stream, create_stream, ) from homeassistant.config_entries import ( @@ -49,7 +52,9 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, TemplateError from homeassistant.helpers import config_validation as cv, template as template_helper +from homeassistant.helpers.entity_platform import EntityPlatform from homeassistant.helpers.httpx_client import get_async_client +from homeassistant.setup import async_prepare_setup_platform from homeassistant.util import slugify from .camera import GenericCamera, generate_auth @@ -79,6 +84,15 @@ SUPPORTED_IMAGE_TYPES = {"png", "jpeg", "gif", "svg+xml", "webp"} IMAGE_PREVIEWS_ACTIVE = "previews" +class InvalidStreamException(HomeAssistantError): + """Error to indicate an invalid stream.""" + + def __init__(self, error: str, details: str | None = None) -> None: + """Initialize the error.""" + super().__init__(error) + self.details = details + + def build_schema( user_input: Mapping[str, Any], is_options_flow: bool = False, @@ -231,12 +245,16 @@ def slug( return None -async def async_test_stream( +async def async_test_and_preview_stream( hass: HomeAssistant, info: Mapping[str, Any] -) -> dict[str, str]: - """Verify that the stream is valid before we create an entity.""" +) -> Stream | None: + """Verify that the stream is valid before we create an entity. + + Returns the stream object if valid. Raises InvalidStreamException if not. 
+ The stream object is used to preview the video in the UI. + """ if not (stream_source := info.get(CONF_STREAM_SOURCE)): - return {} + return None # Import from stream.worker as stream cannot reexport from worker # without forcing the av dependency on default_config # pylint: disable-next=import-outside-toplevel @@ -248,7 +266,7 @@ async def async_test_stream( stream_source = stream_source.async_render(parse_result=False) except TemplateError as err: _LOGGER.warning("Problem rendering template %s: %s", stream_source, err) - return {CONF_STREAM_SOURCE: "template_error"} + raise InvalidStreamException("template_error") from err stream_options: dict[str, str | bool | float] = {} if rtsp_transport := info.get(CONF_RTSP_TRANSPORT): stream_options[CONF_RTSP_TRANSPORT] = rtsp_transport @@ -257,10 +275,10 @@ async def async_test_stream( try: url = yarl.URL(stream_source) - except ValueError: - return {CONF_STREAM_SOURCE: "malformed_url"} + except ValueError as err: + raise InvalidStreamException("malformed_url") from err if not url.is_absolute(): - return {CONF_STREAM_SOURCE: "relative_url"} + raise InvalidStreamException("relative_url") if not url.user and not url.password: username = info.get(CONF_USERNAME) password = info.get(CONF_PASSWORD) @@ -273,29 +291,28 @@ async def async_test_stream( stream_source, stream_options, DynamicStreamSettings(), - "test_stream", + f"{DOMAIN}.test_stream", ) hls_provider = stream.add_provider(HLS_PROVIDER) - await stream.start() - if not await hls_provider.part_recv(timeout=SOURCE_TIMEOUT): - hass.async_create_task(stream.stop()) - return {CONF_STREAM_SOURCE: "timeout"} - await stream.stop() except StreamWorkerError as err: - return {CONF_STREAM_SOURCE: "unknown_with_details", "error_details": str(err)} - except PermissionError: - return {CONF_STREAM_SOURCE: "stream_not_permitted"} + raise InvalidStreamException("unknown_with_details", str(err)) from err + except PermissionError as err: + raise 
InvalidStreamException("stream_not_permitted") from err except OSError as err: if err.errno == EHOSTUNREACH: - return {CONF_STREAM_SOURCE: "stream_no_route_to_host"} + raise InvalidStreamException("stream_no_route_to_host") from err if err.errno == EIO: # input/output error - return {CONF_STREAM_SOURCE: "stream_io_error"} + raise InvalidStreamException("stream_io_error") from err raise except HomeAssistantError as err: if "Stream integration is not set up" in str(err): - return {CONF_STREAM_SOURCE: "stream_not_set_up"} + raise InvalidStreamException("stream_not_set_up") from err raise - return {} + await stream.start() + if not await hls_provider.part_recv(timeout=SOURCE_TIMEOUT): + hass.async_create_task(stream.stop()) + raise InvalidStreamException("timeout") + return stream def register_preview(hass: HomeAssistant) -> None: @@ -316,6 +333,7 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize Generic ConfigFlow.""" self.preview_cam: dict[str, Any] = {} + self.preview_stream: Stream | None = None self.user_input: dict[str, Any] = {} self.title = "" @@ -326,14 +344,6 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return GenericOptionsFlowHandler() - def check_for_existing(self, options: dict[str, Any]) -> bool: - """Check whether an existing entry is using the same URLs.""" - return any( - entry.options.get(CONF_STILL_IMAGE_URL) == options.get(CONF_STILL_IMAGE_URL) - and entry.options.get(CONF_STREAM_SOURCE) == options.get(CONF_STREAM_SOURCE) - for entry in self._async_current_entries() - ) - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -349,10 +359,17 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "no_still_image_or_stream_url" else: errors, still_format = await async_test_still(hass, user_input) - errors = errors | await async_test_stream(hass, user_input) + try: + 
self.preview_stream = await async_test_and_preview_stream( + hass, user_input + ) + except InvalidStreamException as err: + errors[CONF_STREAM_SOURCE] = str(err) + if err.details: + errors["error_details"] = err.details + self.preview_stream = None if not errors: user_input[CONF_CONTENT_TYPE] = still_format - user_input[CONF_LIMIT_REFETCH_TO_URL_CHANGE] = False still_url = user_input.get(CONF_STILL_IMAGE_URL) stream_url = user_input.get(CONF_STREAM_SOURCE) name = ( @@ -365,14 +382,9 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): user_input[CONF_CONTENT_TYPE] = "image/jpeg" self.user_input = user_input self.title = name - - if still_url is None: - return self.async_create_entry( - title=self.title, data={}, options=self.user_input - ) # temporary preview for user to check the image self.preview_cam = user_input - return await self.async_step_user_confirm_still() + return await self.async_step_user_confirm() if "error_details" in errors: description_placeholders["error"] = errors.pop("error_details") elif self.user_input: @@ -386,11 +398,14 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_user_confirm_still( + async def async_step_user_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle user clicking confirm after still preview.""" if user_input: + if ha_stream := self.preview_stream: + # Kill off the temp stream we created. 
+ await ha_stream.stop() if not user_input.get(CONF_CONFIRMED_OK): return await self.async_step_user() return self.async_create_entry( @@ -399,7 +414,7 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): register_preview(self.hass) preview_url = f"/api/generic/preview_flow_image/{self.flow_id}?t={datetime.now().isoformat()}" return self.async_show_form( - step_id="user_confirm_still", + step_id="user_confirm", data_schema=vol.Schema( { vol.Required(CONF_CONFIRMED_OK, default=False): bool, @@ -407,8 +422,14 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): ), description_placeholders={"preview_url": preview_url}, errors=None, + preview="generic_camera", ) + @staticmethod + async def async_setup_preview(hass: HomeAssistant) -> None: + """Set up preview WS API.""" + websocket_api.async_register_command(hass, ws_start_preview) + class GenericOptionsFlowHandler(OptionsFlow): """Handle Generic IP Camera options.""" @@ -423,13 +444,21 @@ class GenericOptionsFlowHandler(OptionsFlow): ) -> ConfigFlowResult: """Manage Generic IP Camera options.""" errors: dict[str, str] = {} + description_placeholders = {} hass = self.hass if user_input is not None: errors, still_format = await async_test_still( hass, self.config_entry.options | user_input ) - errors = errors | await async_test_stream(hass, user_input) + try: + await async_test_and_preview_stream(hass, user_input) + except InvalidStreamException as err: + errors[CONF_STREAM_SOURCE] = str(err) + if err.details: + errors["error_details"] = err.details + # Stream preview during options flow not yet implemented + still_url = user_input.get(CONF_STILL_IMAGE_URL) if not errors: if still_url is None: @@ -449,6 +478,8 @@ class GenericOptionsFlowHandler(OptionsFlow): # temporary preview for user to check the image self.preview_cam = data return await self.async_step_confirm_still() + if "error_details" in errors: + description_placeholders["error"] = errors.pop("error_details") return self.async_show_form( 
step_id="init", data_schema=build_schema( @@ -456,6 +487,7 @@ class GenericOptionsFlowHandler(OptionsFlow): True, self.show_advanced_options, ), + description_placeholders=description_placeholders, errors=errors, ) @@ -518,3 +550,59 @@ class CameraImagePreview(HomeAssistantView): CAMERA_IMAGE_TIMEOUT, ) return web.Response(body=image.content, content_type=image.content_type) + + +@websocket_api.websocket_command( + { + vol.Required("type"): "generic_camera/start_preview", + vol.Required("flow_id"): str, + vol.Optional("flow_type"): vol.Any("config_flow"), + vol.Optional("user_input"): dict, + } +) +@websocket_api.async_response +async def ws_start_preview( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Generate websocket handler for the camera still/stream preview.""" + _LOGGER.debug("Generating websocket handler for generic camera preview") + + flow_id = msg["flow_id"] + flow = cast( + GenericIPCamConfigFlow, + hass.config_entries.flow._progress.get(flow_id), # noqa: SLF001 + ) + user_input = flow.preview_cam + + # Create an EntityPlatform, needed for name translations + platform = await async_prepare_setup_platform(hass, {}, CAMERA_DOMAIN, DOMAIN) + entity_platform = EntityPlatform( + hass=hass, + logger=_LOGGER, + domain=CAMERA_DOMAIN, + platform_name=DOMAIN, + platform=platform, + scan_interval=timedelta(seconds=3600), + entity_namespace=None, + ) + await entity_platform.async_load_translations() + + ha_still_url = None + ha_stream_url = None + + if user_input.get(CONF_STILL_IMAGE_URL): + ha_still_url = f"/api/generic/preview_flow_image/{msg['flow_id']}?t={datetime.now().isoformat()}" + _LOGGER.debug("Got preview still URL: %s", ha_still_url) + + if ha_stream := flow.preview_stream: + ha_stream_url = ha_stream.endpoint_url(HLS_PROVIDER) + _LOGGER.debug("Got preview stream URL: %s", ha_stream_url) + + connection.send_message( + websocket_api.event_message( + msg["id"], + {"attributes": {"still_url": 
ha_still_url, "stream_url": ha_stream_url}}, + ) + ) diff --git a/homeassistant/components/generic/manifest.json b/homeassistant/components/generic/manifest.json index c1fbc16d9be..0b6f07e8205 100644 --- a/homeassistant/components/generic/manifest.json +++ b/homeassistant/components/generic/manifest.json @@ -3,7 +3,7 @@ "name": "Generic Camera", "codeowners": ["@davet2001"], "config_flow": true, - "dependencies": ["http"], + "dependencies": ["http", "stream"], "documentation": "https://www.home-assistant.io/integrations/generic", "integration_type": "device", "iot_class": "local_push", diff --git a/homeassistant/components/generic/strings.json b/homeassistant/components/generic/strings.json index 94360a5b7c2..b3ecadacba5 100644 --- a/homeassistant/components/generic/strings.json +++ b/homeassistant/components/generic/strings.json @@ -39,11 +39,11 @@ "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" } }, - "user_confirm_still": { - "title": "Preview", - "description": "![Camera Still Image Preview]({preview_url})", + "user_confirm": { + "title": "Confirmation", + "description": "Please wait for previews to load...", "data": { - "confirmed_ok": "This image looks good." + "confirmed_ok": "Everything looks good." } } } @@ -68,15 +68,16 @@ } }, "confirm_still": { - "title": "[%key:component::generic::config::step::user_confirm_still::title%]", - "description": "[%key:component::generic::config::step::user_confirm_still::description%]", + "title": "Preview", + "description": "![Camera Still Image Preview]({preview_url})", "data": { - "confirmed_ok": "[%key:component::generic::config::step::user_confirm_still::data::confirmed_ok%]" + "confirmed_ok": "This image looks good." 
} } }, "error": { "unknown": "[%key:common::config_flow::error::unknown%]", + "unknown_with_details": "[%key:common::config_flow::error::unknown_with_details]", "already_exists": "[%key:component::generic::config::error::already_exists%]", "unable_still_load": "[%key:component::generic::config::error::unable_still_load%]", "unable_still_load_auth": "[%key:component::generic::config::error::unable_still_load_auth%]", diff --git a/tests/components/generic/conftest.py b/tests/components/generic/conftest.py index 69e6cc6b696..cdea83b599c 100644 --- a/tests/components/generic/conftest.py +++ b/tests/components/generic/conftest.py @@ -71,16 +71,18 @@ def fakeimg_gif(fakeimgbytes_gif: bytes) -> Generator[None]: respx.pop("fake_img") -@pytest.fixture(scope="package") -def mock_create_stream() -> _patch[MagicMock]: +@pytest.fixture +def mock_create_stream(hass: HomeAssistant) -> _patch[MagicMock]: """Mock create stream.""" - mock_stream = Mock() + mock_stream = MagicMock() + mock_stream.hass = hass mock_provider = Mock() mock_provider.part_recv = AsyncMock() mock_provider.part_recv.return_value = True mock_stream.add_provider.return_value = mock_provider mock_stream.start = AsyncMock() mock_stream.stop = AsyncMock() + mock_stream.endpoint_url.return_value = "http://127.0.0.1/nothing" return patch( "homeassistant.components.generic.config_flow.create_stream", return_value=mock_stream, diff --git a/tests/components/generic/test_config_flow.py b/tests/components/generic/test_config_flow.py index a882ca4cd8d..f121b210c0c 100644 --- a/tests/components/generic/test_config_flow.py +++ b/tests/components/generic/test_config_flow.py @@ -9,6 +9,7 @@ import os.path from pathlib import Path from unittest.mock import AsyncMock, MagicMock, PropertyMock, _patch, patch +from freezegun.api import FrozenDateTimeFactory import httpx import pytest import respx @@ -44,8 +45,8 @@ from homeassistant.data_entry_flow import FlowResultType from homeassistant.exceptions import HomeAssistantError from 
homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry -from tests.typing import ClientSessionGenerator +from tests.common import MockConfigEntry, async_fire_time_changed +from tests.typing import ClientSessionGenerator, WebSocketGenerator TESTDATA = { CONF_STILL_IMAGE_URL: "http://127.0.0.1/testurl/1", @@ -75,6 +76,7 @@ async def test_form( hass_client: ClientSessionGenerator, user_flow: ConfigFlowResult, mock_create_stream: _patch[MagicMock], + hass_ws_client: WebSocketGenerator, ) -> None: """Test the form with a normal set of settings.""" @@ -90,18 +92,29 @@ async def test_form( TESTDATA, ) assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "user_confirm_still" + assert result1["step_id"] == "user_confirm" client = await hass_client() preview_url = result1["description_placeholders"]["preview_url"] # Check the preview image works. resp = await client.get(preview_url) assert resp.status == HTTPStatus.OK assert await resp.read() == fakeimgbytes_png + + # HA should now be serving a WS connection for a preview stream. + ws_client = await hass_ws_client() + flow_id = user_flow["flow_id"] + await ws_client.send_json_auto_id( + { + "type": "generic_camera/start_preview", + "flow_id": flow_id, + }, + ) + _ = await ws_client.receive_json() + result2 = await hass.config_entries.flow.async_configure( result1["flow_id"], user_input={CONF_CONFIRMED_OK: True}, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "127_0_0_1" assert result2["options"] == { @@ -110,13 +123,11 @@ async def test_form( CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION, CONF_USERNAME: "fred_flintstone", CONF_PASSWORD: "bambam", - CONF_LIMIT_REFETCH_TO_URL_CHANGE: False, CONF_CONTENT_TYPE: "image/png", - CONF_FRAMERATE: 5, + CONF_FRAMERATE: 5.0, CONF_VERIFY_SSL: False, } - await hass.async_block_till_done() # Check that the preview image is disabled after. 
resp = await client.get(preview_url) assert resp.status == HTTPStatus.NOT_FOUND @@ -145,7 +156,7 @@ async def test_form_only_stillimage( ) await hass.async_block_till_done() assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "user_confirm_still" + assert result1["step_id"] == "user_confirm" result2 = await hass.config_entries.flow.async_configure( result1["flow_id"], user_input={CONF_CONFIRMED_OK: True}, @@ -157,9 +168,8 @@ async def test_form_only_stillimage( CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION, CONF_USERNAME: "fred_flintstone", CONF_PASSWORD: "bambam", - CONF_LIMIT_REFETCH_TO_URL_CHANGE: False, CONF_CONTENT_TYPE: "image/png", - CONF_FRAMERATE: 5, + CONF_FRAMERATE: 5.0, CONF_VERIFY_SSL: False, } @@ -167,13 +177,13 @@ async def test_form_only_stillimage( @respx.mock -async def test_form_reject_still_preview( +async def test_form_reject_preview( hass: HomeAssistant, fakeimgbytes_png: bytes, mock_create_stream: _patch[MagicMock], user_flow: ConfigFlowResult, ) -> None: - """Test we go back to the config screen if the user rejects the still preview.""" + """Test we go back to the config screen if the user rejects the preview.""" respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_png) with mock_create_stream: result1 = await hass.config_entries.flow.async_configure( @@ -181,7 +191,7 @@ async def test_form_reject_still_preview( TESTDATA, ) assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "user_confirm_still" + assert result1["step_id"] == "user_confirm" result2 = await hass.config_entries.flow.async_configure( result1["flow_id"], user_input={CONF_CONFIRMED_OK: False}, @@ -211,7 +221,7 @@ async def test_form_still_preview_cam_off( TESTDATA, ) assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "user_confirm_still" + assert result1["step_id"] == "user_confirm" preview_url = result1["description_placeholders"]["preview_url"] # Try to view the image, should be 
unavailable. client = await hass_client() @@ -233,7 +243,7 @@ async def test_form_only_stillimage_gif( data, ) assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "user_confirm_still" + assert result1["step_id"] == "user_confirm" result2 = await hass.config_entries.flow.async_configure( result1["flow_id"], user_input={CONF_CONFIRMED_OK: True}, @@ -258,7 +268,7 @@ async def test_form_only_svg_whitespace( data, ) assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "user_confirm_still" + assert result1["step_id"] == "user_confirm" result2 = await hass.config_entries.flow.async_configure( result1["flow_id"], user_input={CONF_CONFIRMED_OK: True}, @@ -293,7 +303,7 @@ async def test_form_only_still_sample( data, ) assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "user_confirm_still" + assert result1["step_id"] == "user_confirm" result2 = await hass.config_entries.flow.async_configure( result1["flow_id"], user_input={CONF_CONFIRMED_OK: True}, @@ -310,13 +320,13 @@ async def test_form_only_still_sample( ( "http://localhost:812{{3}}/static/icons/favicon-apple-180x180.png", "http://localhost:8123/static/icons/favicon-apple-180x180.png", - "user_confirm_still", + "user_confirm", None, ), ( "{% if 1 %}https://bla{% else %}https://yo{% endif %}", "https://bla/", - "user_confirm_still", + "user_confirm", None, ), ( @@ -385,7 +395,7 @@ async def test_form_rtsp_mode( user_flow["flow_id"], data ) assert result1["type"] is FlowResultType.FORM - assert result1["step_id"] == "user_confirm_still" + assert result1["step_id"] == "user_confirm" result2 = await hass.config_entries.flow.async_configure( result1["flow_id"], user_input={CONF_CONFIRMED_OK: True}, @@ -399,13 +409,11 @@ async def test_form_rtsp_mode( CONF_RTSP_TRANSPORT: "tcp", CONF_USERNAME: "fred_flintstone", CONF_PASSWORD: "bambam", - CONF_LIMIT_REFETCH_TO_URL_CHANGE: False, CONF_CONTENT_TYPE: "image/png", - CONF_FRAMERATE: 5, + CONF_FRAMERATE: 5.0, 
CONF_VERIFY_SSL: False, } - await hass.async_block_till_done() assert len(mock_setup.mock_calls) == 1 @@ -419,33 +427,36 @@ async def test_form_only_stream( data = TESTDATA.copy() data.pop(CONF_STILL_IMAGE_URL) data[CONF_STREAM_SOURCE] = "rtsp://user:pass@127.0.0.1/testurl/2" - with mock_create_stream as mock_setup: + with mock_create_stream: result1 = await hass.config_entries.flow.async_configure( user_flow["flow_id"], data, ) - assert result1["type"] is FlowResultType.CREATE_ENTRY - assert result1["title"] == "127_0_0_1" - assert result1["options"] == { + + assert result1["type"] is FlowResultType.FORM + with mock_create_stream: + result2 = await hass.config_entries.flow.async_configure( + result1["flow_id"], + user_input={CONF_CONFIRMED_OK: True}, + ) + + assert result2["title"] == "127_0_0_1" + assert result2["options"] == { CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION, CONF_STREAM_SOURCE: "rtsp://user:pass@127.0.0.1/testurl/2", CONF_USERNAME: "fred_flintstone", CONF_PASSWORD: "bambam", - CONF_LIMIT_REFETCH_TO_URL_CHANGE: False, CONF_CONTENT_TYPE: "image/jpeg", - CONF_FRAMERATE: 5, + CONF_FRAMERATE: 5.0, CONF_VERIFY_SSL: False, } - await hass.async_block_till_done() - with patch( "homeassistant.components.camera._async_get_stream_image", return_value=fakeimgbytes_jpg, ): image_obj = await async_get_image(hass, "camera.127_0_0_1") assert image_obj.content == fakeimgbytes_jpg - assert len(mock_setup.mock_calls) == 1 async def test_form_still_and_stream_not_provided( @@ -512,7 +523,6 @@ async def test_form_image_http_exceptions( user_flow["flow_id"], TESTDATA, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM assert result2["errors"] == expected_message @@ -531,7 +541,6 @@ async def test_form_stream_invalidimage( user_flow["flow_id"], TESTDATA, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"still_image_url": "invalid_still_image"} @@ -550,7 +559,6 @@ async def 
test_form_stream_invalidimage2( user_flow["flow_id"], TESTDATA, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"still_image_url": "unable_still_load_no_image"} @@ -569,7 +577,6 @@ async def test_form_stream_invalidimage3( user_flow["flow_id"], TESTDATA, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"still_image_url": "invalid_still_image"} @@ -585,6 +592,8 @@ async def test_form_stream_timeout( "homeassistant.components.generic.config_flow.create_stream" ) as create_stream: create_stream.return_value.start = AsyncMock() + create_stream.return_value.stop = AsyncMock() + create_stream.return_value.hass = hass create_stream.return_value.add_provider.return_value.part_recv = AsyncMock() create_stream.return_value.add_provider.return_value.part_recv.return_value = ( False @@ -727,6 +736,37 @@ async def test_form_oserror(hass: HomeAssistant, user_flow: ConfigFlowResult) -> ) +@respx.mock +async def test_form_stream_preview_auto_timeout( + hass: HomeAssistant, + user_flow: ConfigFlowResult, + mock_create_stream: _patch[MagicMock], + freezer: FrozenDateTimeFactory, + fakeimgbytes_png: bytes, +) -> None: + """Test that the stream preview times out after 10mins.""" + respx.get("http://fred_flintstone:bambam@127.0.0.1/testurl/2").respond( + stream=fakeimgbytes_png + ) + data = TESTDATA.copy() + data.pop(CONF_STILL_IMAGE_URL) + + with mock_create_stream as mock_stream: + result1 = await hass.config_entries.flow.async_configure( + user_flow["flow_id"], + data, + ) + assert result1["type"] is FlowResultType.FORM + assert result1["step_id"] == "user_confirm" + + freezer.tick(600 + 12) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_str = mock_stream.return_value + mock_str.start.assert_awaited_once() + + @respx.mock async def test_options_template_error( hass: HomeAssistant, fakeimgbytes_png: bytes, 
mock_create_stream: _patch[MagicMock] @@ -842,7 +882,6 @@ async def test_options_only_stream( ) mock_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_entry.entry_id) - await hass.async_block_till_done() result = await hass.config_entries.options.async_init(mock_entry.entry_id) assert result["type"] is FlowResultType.FORM @@ -864,6 +903,27 @@ async def test_options_only_stream( assert result3["data"][CONF_CONTENT_TYPE] == "image/jpeg" +@respx.mock +@pytest.mark.usefixtures("fakeimg_png") +async def test_form_options_stream_worker_error( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: + """Test we handle a StreamWorkerError and pass the message through.""" + + result = await hass.config_entries.options.async_init(config_entry.entry_id) + with patch( + "homeassistant.components.generic.config_flow.create_stream", + side_effect=StreamWorkerError("Some message"), + ): + result2 = await hass.config_entries.options.async_configure( + result["flow_id"], + TESTDATA, + ) + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"stream_source": "unknown_with_details"} + assert result2["description_placeholders"] == {"error": "Some message"} + + @pytest.mark.usefixtures("fakeimg_png") async def test_unload_entry(hass: HomeAssistant) -> None: """Test unloading the generic IP Camera entry.""" From 93c0eb73d29540afc24ee6fd8ebb68e14f269498 Mon Sep 17 00:00:00 2001 From: jesperraemaekers <146726232+jesperraemaekers@users.noreply.github.com> Date: Sun, 22 Dec 2024 17:44:15 +0100 Subject: [PATCH 622/677] Bump Weheat to 2024.12.22 (#133796) --- homeassistant/components/weheat/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/weheat/manifest.json b/homeassistant/components/weheat/manifest.json index 61d6a110dbd..1c6242de29c 100644 --- a/homeassistant/components/weheat/manifest.json +++ 
b/homeassistant/components/weheat/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/weheat", "iot_class": "cloud_polling", - "requirements": ["weheat==2024.11.26"] + "requirements": ["weheat==2024.12.22"] } diff --git a/requirements_all.txt b/requirements_all.txt index 56255fc997e..c0f40a189d1 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3006,7 +3006,7 @@ webio-api==0.1.11 webmin-xmlrpc==0.0.2 # homeassistant.components.weheat -weheat==2024.11.26 +weheat==2024.12.22 # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d80ad1320f5..928a887b08b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2410,7 +2410,7 @@ webio-api==0.1.11 webmin-xmlrpc==0.0.2 # homeassistant.components.weheat -weheat==2024.11.26 +weheat==2024.12.22 # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 From 6179da43215d94b38a91f802acd79af69b61f628 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 22 Dec 2024 07:16:47 -1000 Subject: [PATCH 623/677] Bump PySwitchbot to 0.55.3 (#133812) Co-authored-by: Joost Lekkerkerker --- homeassistant/components/switchbot/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/switchbot/manifest.json b/homeassistant/components/switchbot/manifest.json index 5c91a6e20a5..3153e181af9 100644 --- a/homeassistant/components/switchbot/manifest.json +++ b/homeassistant/components/switchbot/manifest.json @@ -39,5 +39,5 @@ "documentation": "https://www.home-assistant.io/integrations/switchbot", "iot_class": "local_push", "loggers": ["switchbot"], - "requirements": ["PySwitchbot==0.55.2"] + "requirements": ["PySwitchbot==0.55.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index c0f40a189d1..41f0515c83f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -84,7 +84,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.55.2 +PySwitchbot==0.55.3 # homeassistant.components.switchmate PySwitchmate==0.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 928a887b08b..4eaa5cdadec 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -81,7 +81,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.55.2 +PySwitchbot==0.55.3 # homeassistant.components.syncthru PySyncThru==0.7.10 From 0ba32e1d3ad45b7189d4ff05a70a6f27b2531de5 Mon Sep 17 00:00:00 2001 From: Steven Looman Date: Sun, 22 Dec 2024 18:18:05 +0100 Subject: [PATCH 624/677] Bump async-upnp-client to 0.42.0 (#133806) --- homeassistant/components/dlna_dmr/manifest.json | 2 +- homeassistant/components/dlna_dms/manifest.json | 2 +- homeassistant/components/samsungtv/manifest.json | 2 +- homeassistant/components/ssdp/manifest.json | 2 +- homeassistant/components/upnp/manifest.json | 2 +- 
homeassistant/components/yeelight/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/dlna_dmr/manifest.json b/homeassistant/components/dlna_dmr/manifest.json index 84024d5bde1..af16379e9c9 100644 --- a/homeassistant/components/dlna_dmr/manifest.json +++ b/homeassistant/components/dlna_dmr/manifest.json @@ -8,7 +8,7 @@ "documentation": "https://www.home-assistant.io/integrations/dlna_dmr", "iot_class": "local_push", "loggers": ["async_upnp_client"], - "requirements": ["async-upnp-client==0.41.0", "getmac==0.9.4"], + "requirements": ["async-upnp-client==0.42.0", "getmac==0.9.4"], "ssdp": [ { "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1", diff --git a/homeassistant/components/dlna_dms/manifest.json b/homeassistant/components/dlna_dms/manifest.json index 1913bb9d5d7..ac5bf3719e3 100644 --- a/homeassistant/components/dlna_dms/manifest.json +++ b/homeassistant/components/dlna_dms/manifest.json @@ -7,7 +7,7 @@ "dependencies": ["ssdp"], "documentation": "https://www.home-assistant.io/integrations/dlna_dms", "iot_class": "local_polling", - "requirements": ["async-upnp-client==0.41.0"], + "requirements": ["async-upnp-client==0.42.0"], "ssdp": [ { "deviceType": "urn:schemas-upnp-org:device:MediaServer:1", diff --git a/homeassistant/components/samsungtv/manifest.json b/homeassistant/components/samsungtv/manifest.json index 1a6b5ed5313..a1fda25589e 100644 --- a/homeassistant/components/samsungtv/manifest.json +++ b/homeassistant/components/samsungtv/manifest.json @@ -39,7 +39,7 @@ "samsungctl[websocket]==0.7.1", "samsungtvws[async,encrypted]==2.7.2", "wakeonlan==2.1.0", - "async-upnp-client==0.41.0" + "async-upnp-client==0.42.0" ], "ssdp": [ { diff --git a/homeassistant/components/ssdp/manifest.json b/homeassistant/components/ssdp/manifest.json index e9d4f57d5fb..2632e37aa98 100644 --- 
a/homeassistant/components/ssdp/manifest.json +++ b/homeassistant/components/ssdp/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["async_upnp_client"], "quality_scale": "internal", - "requirements": ["async-upnp-client==0.41.0"] + "requirements": ["async-upnp-client==0.42.0"] } diff --git a/homeassistant/components/upnp/manifest.json b/homeassistant/components/upnp/manifest.json index b0b4fe35b39..08e0be2d712 100644 --- a/homeassistant/components/upnp/manifest.json +++ b/homeassistant/components/upnp/manifest.json @@ -8,7 +8,7 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["async_upnp_client"], - "requirements": ["async-upnp-client==0.41.0", "getmac==0.9.4"], + "requirements": ["async-upnp-client==0.42.0", "getmac==0.9.4"], "ssdp": [ { "st": "urn:schemas-upnp-org:device:InternetGatewayDevice:1" diff --git a/homeassistant/components/yeelight/manifest.json b/homeassistant/components/yeelight/manifest.json index 4da2e0cfc3e..eba970dc2db 100644 --- a/homeassistant/components/yeelight/manifest.json +++ b/homeassistant/components/yeelight/manifest.json @@ -16,7 +16,7 @@ }, "iot_class": "local_push", "loggers": ["async_upnp_client", "yeelight"], - "requirements": ["yeelight==0.7.14", "async-upnp-client==0.41.0"], + "requirements": ["yeelight==0.7.14", "async-upnp-client==0.42.0"], "zeroconf": [ { "type": "_miio._udp.local.", diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index bfa479b9c13..d5731041d08 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -10,7 +10,7 @@ aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 async-interrupt==1.2.0 -async-upnp-client==0.41.0 +async-upnp-client==0.42.0 atomicwrites-homeassistant==1.4.1 attrs==24.2.0 audioop-lts==0.2.1;python_version>='3.13' diff --git a/requirements_all.txt b/requirements_all.txt index 41f0515c83f..2381e18a42d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ 
-499,7 +499,7 @@ asmog==0.0.6 # homeassistant.components.ssdp # homeassistant.components.upnp # homeassistant.components.yeelight -async-upnp-client==0.41.0 +async-upnp-client==0.42.0 # homeassistant.components.arve asyncarve==0.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4eaa5cdadec..db166894f00 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -463,7 +463,7 @@ arcam-fmj==1.5.2 # homeassistant.components.ssdp # homeassistant.components.upnp # homeassistant.components.yeelight -async-upnp-client==0.41.0 +async-upnp-client==0.42.0 # homeassistant.components.arve asyncarve==0.1.1 From feca7c28cfd056aa0b5de715d92c5cc023aa8be6 Mon Sep 17 00:00:00 2001 From: "Barry vd. Heuvel" Date: Sun, 22 Dec 2024 18:45:33 +0100 Subject: [PATCH 625/677] Add Compressor, Inside Unit and Energy Output fields to Weheat (#129632) --- homeassistant/components/weheat/icons.json | 6 + homeassistant/components/weheat/sensor.py | 30 +++- homeassistant/components/weheat/strings.json | 9 ++ tests/components/weheat/conftest.py | 3 + .../weheat/snapshots/test_sensor.ambr | 151 ++++++++++++++++++ tests/components/weheat/test_sensor.py | 2 +- 6 files changed, 199 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/weheat/icons.json b/homeassistant/components/weheat/icons.json index 6fdae84cfff..7efd13b0dfb 100644 --- a/homeassistant/components/weheat/icons.json +++ b/homeassistant/components/weheat/icons.json @@ -27,6 +27,12 @@ }, "electricity_used": { "default": "mdi:flash" + }, + "compressor_rpm": { + "default": "mdi:fan" + }, + "compressor_percentage": { + "default": "mdi:fan" } } } diff --git a/homeassistant/components/weheat/sensor.py b/homeassistant/components/weheat/sensor.py index ef5be9030b9..3e5d9376c34 100644 --- a/homeassistant/components/weheat/sensor.py +++ b/homeassistant/components/weheat/sensor.py @@ -11,7 +11,13 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from 
homeassistant.const import UnitOfEnergy, UnitOfPower, UnitOfTemperature +from homeassistant.const import ( + PERCENTAGE, + REVOLUTIONS_PER_MINUTE, + UnitOfEnergy, + UnitOfPower, + UnitOfTemperature, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType @@ -142,6 +148,28 @@ SENSORS = [ state_class=SensorStateClass.TOTAL_INCREASING, value_fn=lambda status: status.energy_total, ), + WeHeatSensorEntityDescription( + translation_key="energy_output", + key="energy_output", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda status: status.energy_output, + ), + WeHeatSensorEntityDescription( + translation_key="compressor_rpm", + key="compressor_rpm", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, + value_fn=lambda status: status.compressor_rpm, + ), + WeHeatSensorEntityDescription( + translation_key="compressor_percentage", + key="compressor_percentage", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + value_fn=lambda status: status.compressor_percentage, + ), ] diff --git a/homeassistant/components/weheat/strings.json b/homeassistant/components/weheat/strings.json index 0733024cbed..c993a6beefe 100644 --- a/homeassistant/components/weheat/strings.json +++ b/homeassistant/components/weheat/strings.json @@ -84,6 +84,15 @@ }, "electricity_used": { "name": "Electricity used" + }, + "energy_output": { + "name": "Total energy output" + }, + "compressor_rpm": { + "name": "Compressor speed" + }, + "compressor_percentage": { + "name": "Compressor usage" } } } diff --git a/tests/components/weheat/conftest.py b/tests/components/weheat/conftest.py index 6ecb64ffdf4..7169a3b56c8 100644 --- a/tests/components/weheat/conftest.py +++ b/tests/components/weheat/conftest.py @@ 
-121,6 +121,9 @@ def mock_weheat_heat_pump_instance() -> MagicMock: mock_heat_pump_instance.cop = 4.5 mock_heat_pump_instance.heat_pump_state = HeatPump.State.HEATING mock_heat_pump_instance.energy_total = 12345 + mock_heat_pump_instance.energy_output = 56789 + mock_heat_pump_instance.compressor_rpm = 4500 + mock_heat_pump_instance.compressor_percentage = 100 return mock_heat_pump_instance diff --git a/tests/components/weheat/snapshots/test_sensor.ambr b/tests/components/weheat/snapshots/test_sensor.ambr index 3bd4a254598..1a54711d6c5 100644 --- a/tests/components/weheat/snapshots/test_sensor.ambr +++ b/tests/components/weheat/snapshots/test_sensor.ambr @@ -123,6 +123,106 @@ 'state': '33', }) # --- +# name: test_all_entities[sensor.test_model_compressor_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_compressor_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Compressor speed', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'compressor_rpm', + 'unique_id': '0000-1111-2222-3333_compressor_rpm', + 'unit_of_measurement': 'rpm', + }) +# --- +# name: test_all_entities[sensor.test_model_compressor_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Model Compressor speed', + 'state_class': , + 'unit_of_measurement': 'rpm', + }), + 'context': , + 'entity_id': 'sensor.test_model_compressor_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4500', + }) +# --- +# name: test_all_entities[sensor.test_model_compressor_usage-entry] 
+ EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_compressor_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Compressor usage', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'compressor_percentage', + 'unique_id': '0000-1111-2222-3333_compressor_percentage', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.test_model_compressor_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Model Compressor usage', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_model_compressor_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- # name: test_all_entities[sensor.test_model_cop-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -604,6 +704,57 @@ 'state': '21', }) # --- +# name: test_all_entities[sensor.test_model_total_energy_output-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_model_total_energy_output', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total energy output', + 'platform': 'weheat', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'energy_output', + 'unique_id': '0000-1111-2222-3333_energy_output', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.test_model_total_energy_output-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Test Model Total energy output', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_model_total_energy_output', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '56789', + }) +# --- # name: test_all_entities[sensor.test_model_water_inlet_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/weheat/test_sensor.py b/tests/components/weheat/test_sensor.py index d9055addc67..062b84d0423 100644 --- a/tests/components/weheat/test_sensor.py +++ b/tests/components/weheat/test_sensor.py @@ -34,7 +34,7 @@ async def test_all_entities( await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) -@pytest.mark.parametrize(("has_dhw", "nr_of_entities"), [(False, 12), (True, 14)]) +@pytest.mark.parametrize(("has_dhw", "nr_of_entities"), [(False, 15), (True, 17)]) async def test_create_entities( hass: HomeAssistant, mock_weheat_discover: AsyncMock, From 0e9965150e7cc0fad81531b1bff84355b524608a Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 22 Dec 2024 19:00:49 +0100 Subject: [PATCH 626/677] Show device name in Twinkly discovery (#133814) --- homeassistant/components/twinkly/config_flow.py | 3 +++ homeassistant/components/twinkly/strings.json | 1 + 2 files changed, 4 insertions(+) diff --git a/homeassistant/components/twinkly/config_flow.py b/homeassistant/components/twinkly/config_flow.py index 4dec8809f07..53ba8f084c3 100644 --- a/homeassistant/components/twinkly/config_flow.py +++ b/homeassistant/components/twinkly/config_flow.py @@ -80,6 +80,9 @@ class TwinklyConfigFlow(ConfigFlow, domain=DOMAIN): return 
self._create_entry_from_device(device_info, host) self._set_confirm_only() + self.context["title_placeholders"] = { + "name": device_info[DEV_NAME], + } placeholders = { "model": device_info[DEV_MODEL], "name": device_info[DEV_NAME], diff --git a/homeassistant/components/twinkly/strings.json b/homeassistant/components/twinkly/strings.json index d27de8a75de..bbc3d67373d 100644 --- a/homeassistant/components/twinkly/strings.json +++ b/homeassistant/components/twinkly/strings.json @@ -1,5 +1,6 @@ { "config": { + "flow_title": "{name}", "step": { "user": { "data": { From d9948847260d22f25d7c144c10d09ba02a91d4f4 Mon Sep 17 00:00:00 2001 From: "Barry vd. Heuvel" Date: Sun, 22 Dec 2024 19:07:01 +0100 Subject: [PATCH 627/677] Add binary states for Weheat indoor unit (#133811) Co-authored-by: Joost Lekkerkerker --- homeassistant/components/weheat/__init__.py | 2 +- .../components/weheat/binary_sensor.py | 100 ++++++++++ homeassistant/components/weheat/icons.json | 17 ++ homeassistant/components/weheat/strings.json | 17 ++ tests/components/weheat/conftest.py | 5 + .../weheat/snapshots/test_binary_sensor.ambr | 188 ++++++++++++++++++ tests/components/weheat/test_binary_sensor.py | 52 +++++ 7 files changed, 380 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/weheat/binary_sensor.py create mode 100644 tests/components/weheat/snapshots/test_binary_sensor.ambr create mode 100644 tests/components/weheat/test_binary_sensor.py diff --git a/homeassistant/components/weheat/__init__.py b/homeassistant/components/weheat/__init__.py index d924d6ceaab..a043a3a6845 100644 --- a/homeassistant/components/weheat/__init__.py +++ b/homeassistant/components/weheat/__init__.py @@ -17,7 +17,7 @@ from homeassistant.helpers.config_entry_oauth2_flow import ( from .const import API_URL, LOGGER from .coordinator import WeheatDataUpdateCoordinator -PLATFORMS: list[Platform] = [Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR] type 
WeheatConfigEntry = ConfigEntry[list[WeheatDataUpdateCoordinator]] diff --git a/homeassistant/components/weheat/binary_sensor.py b/homeassistant/components/weheat/binary_sensor.py new file mode 100644 index 00000000000..ea939227e77 --- /dev/null +++ b/homeassistant/components/weheat/binary_sensor.py @@ -0,0 +1,100 @@ +"""Binary sensor platform for Weheat integration.""" + +from collections.abc import Callable +from dataclasses import dataclass + +from weheat.abstractions.heat_pump import HeatPump + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . import WeheatConfigEntry +from .coordinator import WeheatDataUpdateCoordinator +from .entity import WeheatEntity + + +@dataclass(frozen=True, kw_only=True) +class WeHeatBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes Weheat binary sensor entity.""" + + value_fn: Callable[[HeatPump], StateType] + + +BINARY_SENSORS = [ + WeHeatBinarySensorEntityDescription( + translation_key="indoor_unit_water_pump_state", + key="indoor_unit_water_pump_state", + device_class=BinarySensorDeviceClass.RUNNING, + value_fn=lambda status: status.indoor_unit_water_pump_state, + ), + WeHeatBinarySensorEntityDescription( + translation_key="indoor_unit_auxiliary_pump_state", + key="indoor_unit_auxiliary_pump_state", + device_class=BinarySensorDeviceClass.RUNNING, + value_fn=lambda status: status.indoor_unit_auxiliary_pump_state, + ), + WeHeatBinarySensorEntityDescription( + translation_key="indoor_unit_dhw_valve_or_pump_state", + key="indoor_unit_dhw_valve_or_pump_state", + device_class=BinarySensorDeviceClass.RUNNING, + value_fn=lambda status: status.indoor_unit_dhw_valve_or_pump_state, + ), + WeHeatBinarySensorEntityDescription( + 
translation_key="indoor_unit_gas_boiler_state", + key="indoor_unit_gas_boiler_state", + value_fn=lambda status: status.indoor_unit_gas_boiler_state, + ), + WeHeatBinarySensorEntityDescription( + translation_key="indoor_unit_electric_heater_state", + key="indoor_unit_electric_heater_state", + device_class=BinarySensorDeviceClass.RUNNING, + value_fn=lambda status: status.indoor_unit_electric_heater_state, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: WeheatConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the sensors for weheat heat pump.""" + entities = [ + WeheatHeatPumpBinarySensor(coordinator, entity_description) + for entity_description in BINARY_SENSORS + for coordinator in entry.runtime_data + if entity_description.value_fn(coordinator.data) is not None + ] + + async_add_entities(entities) + + +class WeheatHeatPumpBinarySensor(WeheatEntity, BinarySensorEntity): + """Defines a Weheat heat pump binary sensor.""" + + coordinator: WeheatDataUpdateCoordinator + entity_description: WeHeatBinarySensorEntityDescription + + def __init__( + self, + coordinator: WeheatDataUpdateCoordinator, + entity_description: WeHeatBinarySensorEntityDescription, + ) -> None: + """Pass coordinator to CoordinatorEntity.""" + super().__init__(coordinator) + + self.entity_description = entity_description + + self._attr_unique_id = f"{coordinator.heatpump_id}_{entity_description.key}" + + @property + def is_on(self) -> bool | None: + """Return True if the binary sensor is on.""" + value = self.entity_description.value_fn(self.coordinator.data) + return bool(value) if value is not None else None diff --git a/homeassistant/components/weheat/icons.json b/homeassistant/components/weheat/icons.json index 7efd13b0dfb..e7f54b478c6 100644 --- a/homeassistant/components/weheat/icons.json +++ b/homeassistant/components/weheat/icons.json @@ -1,5 +1,22 @@ { "entity": { + "binary_sensor": { + "indoor_unit_water_pump_state": { + "default": 
"mdi:pump" + }, + "indoor_unit_auxiliary_pump_state": { + "default": "mdi:pump" + }, + "indoor_unit_dhw_valve_or_pump_state": { + "default": "mdi:pump" + }, + "indoor_unit_gas_boiler_state": { + "default": "mdi:toggle-switch" + }, + "indoor_unit_electric_heater_state": { + "default": "mdi:heating-coil" + } + }, "sensor": { "power_output": { "default": "mdi:heat-wave" diff --git a/homeassistant/components/weheat/strings.json b/homeassistant/components/weheat/strings.json index c993a6beefe..2a208c2f8ca 100644 --- a/homeassistant/components/weheat/strings.json +++ b/homeassistant/components/weheat/strings.json @@ -32,6 +32,23 @@ } }, "entity": { + "binary_sensor": { + "indoor_unit_water_pump_state": { + "name": "Indoor unit water pump" + }, + "indoor_unit_auxiliary_pump_state": { + "name": "Indoor unit auxilary water pump" + }, + "indoor_unit_dhw_valve_or_pump_state": { + "name": "Indoor unit DHW valve or water pump" + }, + "indoor_unit_gas_boiler_state": { + "name": "Indoor unit gas boiler heating allowed" + }, + "indoor_unit_electric_heater_state": { + "name": "Indoor unit electric heater" + } + }, "sensor": { "power_output": { "name": "Output power" diff --git a/tests/components/weheat/conftest.py b/tests/components/weheat/conftest.py index 7169a3b56c8..1bbe91fc573 100644 --- a/tests/components/weheat/conftest.py +++ b/tests/components/weheat/conftest.py @@ -124,6 +124,11 @@ def mock_weheat_heat_pump_instance() -> MagicMock: mock_heat_pump_instance.energy_output = 56789 mock_heat_pump_instance.compressor_rpm = 4500 mock_heat_pump_instance.compressor_percentage = 100 + mock_heat_pump_instance.indoor_unit_water_pump_state = False + mock_heat_pump_instance.indoor_unit_auxiliary_pump_state = False + mock_heat_pump_instance.indoor_unit_dhw_valve_or_pump_state = None + mock_heat_pump_instance.indoor_unit_gas_boiler_state = False + mock_heat_pump_instance.indoor_unit_electric_heater_state = True return mock_heat_pump_instance diff --git 
a/tests/components/weheat/snapshots/test_binary_sensor.ambr b/tests/components/weheat/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..08d609ca610 --- /dev/null +++ b/tests/components/weheat/snapshots/test_binary_sensor.ambr @@ -0,0 +1,188 @@ +# serializer version: 1 +# name: test_binary_entities[binary_sensor.test_model_indoor_unit_auxilary_water_pump-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_model_indoor_unit_auxilary_water_pump', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Indoor unit auxilary water pump', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'indoor_unit_auxiliary_pump_state', + 'unique_id': '0000-1111-2222-3333_indoor_unit_auxiliary_pump_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_entities[binary_sensor.test_model_indoor_unit_auxilary_water_pump-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Test Model Indoor unit auxilary water pump', + }), + 'context': , + 'entity_id': 'binary_sensor.test_model_indoor_unit_auxilary_water_pump', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_entities[binary_sensor.test_model_indoor_unit_electric_heater-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 
'binary_sensor.test_model_indoor_unit_electric_heater', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Indoor unit electric heater', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'indoor_unit_electric_heater_state', + 'unique_id': '0000-1111-2222-3333_indoor_unit_electric_heater_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_entities[binary_sensor.test_model_indoor_unit_electric_heater-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Test Model Indoor unit electric heater', + }), + 'context': , + 'entity_id': 'binary_sensor.test_model_indoor_unit_electric_heater', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_entities[binary_sensor.test_model_indoor_unit_gas_boiler_heating_allowed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_model_indoor_unit_gas_boiler_heating_allowed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Indoor unit gas boiler heating allowed', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'indoor_unit_gas_boiler_state', + 'unique_id': '0000-1111-2222-3333_indoor_unit_gas_boiler_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_entities[binary_sensor.test_model_indoor_unit_gas_boiler_heating_allowed-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Model Indoor unit gas boiler heating allowed', + }), + 'context': , + 'entity_id': 'binary_sensor.test_model_indoor_unit_gas_boiler_heating_allowed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_entities[binary_sensor.test_model_indoor_unit_water_pump-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_model_indoor_unit_water_pump', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Indoor unit water pump', + 'platform': 'weheat', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'indoor_unit_water_pump_state', + 'unique_id': '0000-1111-2222-3333_indoor_unit_water_pump_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_entities[binary_sensor.test_model_indoor_unit_water_pump-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Test Model Indoor unit water pump', + }), + 'context': , + 'entity_id': 'binary_sensor.test_model_indoor_unit_water_pump', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/weheat/test_binary_sensor.py b/tests/components/weheat/test_binary_sensor.py new file mode 100644 index 00000000000..e75cb282e24 --- /dev/null +++ b/tests/components/weheat/test_binary_sensor.py @@ -0,0 +1,52 @@ +"""Tests for the weheat sensor platform.""" + +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy 
import SnapshotAssertion +from weheat.abstractions.discovery import HeatPumpDiscovery + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_binary_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_weheat_discover: AsyncMock, + mock_weheat_heat_pump: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.weheat.PLATFORMS", [Platform.BINARY_SENSOR]): + await setup_integration(hass, mock_config_entry) + + await hass.async_block_till_done() + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_create_binary_entities( + hass: HomeAssistant, + mock_weheat_discover: AsyncMock, + mock_weheat_heat_pump: AsyncMock, + mock_heat_pump_info: HeatPumpDiscovery.HeatPumpInfo, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test creating entities.""" + mock_weheat_discover.return_value = [mock_heat_pump_info] + + with patch("homeassistant.components.weheat.PLATFORMS", [Platform.BINARY_SENSOR]): + await setup_integration(hass, mock_config_entry) + + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 4 From 84a3a9d495e80ccbacd0d601c9153c0ede718a2e Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 22 Dec 2024 19:25:55 +0100 Subject: [PATCH 628/677] Add select error handling for Peblar Rocksolid EV Chargers (#133804) --- homeassistant/components/peblar/select.py | 2 + tests/components/peblar/test_select.py | 155 +++++++++++++++++++++- 2 files changed, 154 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/peblar/select.py 
b/homeassistant/components/peblar/select.py index e9c7da77bec..a2a0997a797 100644 --- a/homeassistant/components/peblar/select.py +++ b/homeassistant/components/peblar/select.py @@ -15,6 +15,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .coordinator import PeblarConfigEntry, PeblarUserConfigurationDataUpdateCoordinator from .entity import PeblarEntity +from .helpers import peblar_exception_handler PARALLEL_UPDATES = 1 @@ -74,6 +75,7 @@ class PeblarSelectEntity( """Return the selected entity option to represent the entity state.""" return self.entity_description.current_fn(self.coordinator.data) + @peblar_exception_handler async def async_select_option(self, option: str) -> None: """Change the selected option.""" await self.entity_description.select_fn(self.coordinator.peblar, option) diff --git a/tests/components/peblar/test_select.py b/tests/components/peblar/test_select.py index e20d84da755..5e4ab4609d4 100644 --- a/tests/components/peblar/test_select.py +++ b/tests/components/peblar/test_select.py @@ -1,18 +1,36 @@ """Tests for the Peblar select platform.""" +from unittest.mock import MagicMock + +from peblar import ( + PeblarAuthenticationError, + PeblarConnectionError, + PeblarError, + SmartChargingMode, +) import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.peblar.const import DOMAIN -from homeassistant.const import Platform +from homeassistant.components.select import ( + ATTR_OPTION, + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import MockConfigEntry, snapshot_platform +pytestmark = [ + pytest.mark.parametrize("init_integration", [Platform.SELECT], 
indirect=True), + pytest.mark.usefixtures("init_integration"), +] + -@pytest.mark.parametrize("init_integration", [Platform.SELECT], indirect=True) -@pytest.mark.usefixtures("init_integration") async def test_entities( hass: HomeAssistant, snapshot: SnapshotAssertion, @@ -33,3 +51,134 @@ async def test_entities( ) for entity_entry in entity_entries: assert entity_entry.device_id == device_entry.id + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_select_option( + hass: HomeAssistant, + mock_peblar: MagicMock, +) -> None: + """Test the Peblar EV charger selects.""" + entity_id = "select.peblar_ev_charger_smart_charging" + mocked_method = mock_peblar.smart_charging + mocked_method.reset_mock() + + # Test normal happy path for changing the select option + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: entity_id, + ATTR_OPTION: "default", + }, + blocking=True, + ) + + assert len(mocked_method.mock_calls) == 1 + mocked_method.assert_called_with(SmartChargingMode.DEFAULT) + + +@pytest.mark.parametrize( + ("error", "error_match", "translation_key", "translation_placeholders"), + [ + ( + PeblarConnectionError("Could not connect"), + ( + r"An error occurred while communicating " + r"with the Peblar device: Could not connect" + ), + "communication_error", + {"error": "Could not connect"}, + ), + ( + PeblarError("Unknown error"), + ( + r"An unknown error occurred while communicating " + r"with the Peblar device: Unknown error" + ), + "unknown_error", + {"error": "Unknown error"}, + ), + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_select_option_communication_error( + hass: HomeAssistant, + mock_peblar: MagicMock, + mock_config_entry: MockConfigEntry, + error: Exception, + error_match: str, + translation_key: str, + translation_placeholders: dict, +) -> None: + """Test the Peblar EV charger when a communication error occurs.""" + entity_id = 
"select.peblar_ev_charger_smart_charging" + mock_peblar.smart_charging.side_effect = error + + with pytest.raises( + HomeAssistantError, + match=error_match, + ) as excinfo: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: entity_id, + ATTR_OPTION: "default", + }, + blocking=True, + ) + + assert excinfo.value.translation_domain == DOMAIN + assert excinfo.value.translation_key == translation_key + assert excinfo.value.translation_placeholders == translation_placeholders + + +async def test_select_option_authentication_error( + hass: HomeAssistant, + mock_peblar: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Peblar EV charger when an authentication error occurs.""" + entity_id = "select.peblar_ev_charger_smart_charging" + mock_peblar.smart_charging.side_effect = PeblarAuthenticationError( + "Authentication error" + ) + mock_peblar.login.side_effect = PeblarAuthenticationError("Authentication error") + + with pytest.raises( + HomeAssistantError, + match=( + r"An authentication failure occurred while communicating " + r"with the Peblar device" + ), + ) as excinfo: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: entity_id, + ATTR_OPTION: "default", + }, + blocking=True, + ) + + assert excinfo.value.translation_domain == DOMAIN + assert excinfo.value.translation_key == "authentication_error" + assert not excinfo.value.translation_placeholders + + # Ensure the device is reloaded on authentication error and triggers + # a reauthentication flow. 
+ await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow["step_id"] == "reauth_confirm" + assert flow["handler"] == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == mock_config_entry.entry_id From a2aba77973acd4de93b3cee354b19ae951606933 Mon Sep 17 00:00:00 2001 From: Simon <80467011+sorgfresser@users.noreply.github.com> Date: Sun, 22 Dec 2024 18:26:15 +0000 Subject: [PATCH 629/677] Fix typo in ElevenLabs (#133819) --- homeassistant/components/elevenlabs/__init__.py | 10 ++++------ homeassistant/components/elevenlabs/config_flow.py | 6 +++--- homeassistant/components/elevenlabs/tts.py | 4 ++-- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/elevenlabs/__init__.py b/homeassistant/components/elevenlabs/__init__.py index 84b2b61b8ed..e8a378d56c6 100644 --- a/homeassistant/components/elevenlabs/__init__.py +++ b/homeassistant/components/elevenlabs/__init__.py @@ -35,10 +35,10 @@ class ElevenLabsData: model: Model -type EleventLabsConfigEntry = ConfigEntry[ElevenLabsData] +type ElevenLabsConfigEntry = ConfigEntry[ElevenLabsData] -async def async_setup_entry(hass: HomeAssistant, entry: EleventLabsConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: ElevenLabsConfigEntry) -> bool: """Set up ElevenLabs text-to-speech from a config entry.""" entry.add_update_listener(update_listener) httpx_client = get_async_client(hass) @@ -60,15 +60,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: EleventLabsConfigEntry) return True -async def async_unload_entry( - hass: HomeAssistant, entry: EleventLabsConfigEntry -) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: ElevenLabsConfigEntry) -> bool: """Unload a config entry.""" return await 
hass.config_entries.async_unload_platforms(entry, PLATFORMS) async def update_listener( - hass: HomeAssistant, config_entry: EleventLabsConfigEntry + hass: HomeAssistant, config_entry: ElevenLabsConfigEntry ) -> None: """Handle options update.""" await hass.config_entries.async_reload(config_entry.entry_id) diff --git a/homeassistant/components/elevenlabs/config_flow.py b/homeassistant/components/elevenlabs/config_flow.py index 60df79d6eaa..227749bf82c 100644 --- a/homeassistant/components/elevenlabs/config_flow.py +++ b/homeassistant/components/elevenlabs/config_flow.py @@ -19,7 +19,7 @@ from homeassistant.helpers.selector import ( SelectSelectorConfig, ) -from . import EleventLabsConfigEntry +from . import ElevenLabsConfigEntry from .const import ( CONF_CONFIGURE_VOICE, CONF_MODEL, @@ -92,7 +92,7 @@ class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod def async_get_options_flow( - config_entry: EleventLabsConfigEntry, + config_entry: ElevenLabsConfigEntry, ) -> OptionsFlow: """Create the options flow.""" return ElevenLabsOptionsFlow(config_entry) @@ -101,7 +101,7 @@ class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN): class ElevenLabsOptionsFlow(OptionsFlow): """ElevenLabs options flow.""" - def __init__(self, config_entry: EleventLabsConfigEntry) -> None: + def __init__(self, config_entry: ElevenLabsConfigEntry) -> None: """Initialize options flow.""" self.api_key: str = config_entry.data[CONF_API_KEY] # id -> name diff --git a/homeassistant/components/elevenlabs/tts.py b/homeassistant/components/elevenlabs/tts.py index c96a7161b72..b89e966593f 100644 --- a/homeassistant/components/elevenlabs/tts.py +++ b/homeassistant/components/elevenlabs/tts.py @@ -22,7 +22,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import EleventLabsConfigEntry +from . 
import ElevenLabsConfigEntry from .const import ( CONF_OPTIMIZE_LATENCY, CONF_SIMILARITY, @@ -56,7 +56,7 @@ def to_voice_settings(options: MappingProxyType[str, Any]) -> VoiceSettings: async def async_setup_entry( hass: HomeAssistant, - config_entry: EleventLabsConfigEntry, + config_entry: ElevenLabsConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up ElevenLabs tts platform via config entry.""" From c6789d70a4e2d5a01d3938277577115c5e931bfd Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Sun, 22 Dec 2024 19:26:35 +0100 Subject: [PATCH 630/677] Remove unneeded type for enphase_envoy coordinator in async_unload_entry (#133817) --- homeassistant/components/enphase_envoy/__init__.py | 2 +- homeassistant/components/enphase_envoy/quality_scale.yaml | 6 +----- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/enphase_envoy/__init__.py b/homeassistant/components/enphase_envoy/__init__.py index f4fe4aff2cb..cdbb7080674 100644 --- a/homeassistant/components/enphase_envoy/__init__.py +++ b/homeassistant/components/enphase_envoy/__init__.py @@ -77,7 +77,7 @@ async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: async def async_unload_entry(hass: HomeAssistant, entry: EnphaseConfigEntry) -> bool: """Unload a config entry.""" - coordinator: EnphaseUpdateCoordinator = entry.runtime_data + coordinator = entry.runtime_data coordinator.async_cancel_token_refresh() return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/enphase_envoy/quality_scale.yaml b/homeassistant/components/enphase_envoy/quality_scale.yaml index 210491c031c..a7038b4e0da 100644 --- a/homeassistant/components/enphase_envoy/quality_scale.yaml +++ b/homeassistant/components/enphase_envoy/quality_scale.yaml @@ -35,11 +35,7 @@ rules: comment: no events used. 
entity-unique-id: done has-entity-name: done - runtime-data: - status: done - comment: | - async_unload_entry- coordinator: EnphaseUpdateCoordinator = entry.runtime_data - You can remove the EnphaseUpdateCoordinator as the type can now be inferred thanks to the typed config entry + runtime-data: done test-before-configure: done test-before-setup: done unique-config-entry: done From de5a49363e20b04bf459b9fb221e6e4adbf52463 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 22 Dec 2024 19:47:31 +0100 Subject: [PATCH 631/677] Add switch error handling for Peblar Rocksolid EV Chargers (#133805) --- homeassistant/components/peblar/switch.py | 3 + tests/components/peblar/test_switch.py | 154 +++++++++++++++++++++- 2 files changed, 154 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/peblar/switch.py b/homeassistant/components/peblar/switch.py index 88f52d01e3a..e56c2fcdaec 100644 --- a/homeassistant/components/peblar/switch.py +++ b/homeassistant/components/peblar/switch.py @@ -20,6 +20,7 @@ from .coordinator import ( PeblarRuntimeData, ) from .entity import PeblarEntity +from .helpers import peblar_exception_handler PARALLEL_UPDATES = 1 @@ -78,11 +79,13 @@ class PeblarSwitchEntity( """Return state of the switch.""" return self.entity_description.is_on_fn(self.coordinator.data) + @peblar_exception_handler async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" await self.entity_description.set_fn(self.coordinator.api, True) await self.coordinator.async_request_refresh() + @peblar_exception_handler async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" await self.entity_description.set_fn(self.coordinator.api, False) diff --git a/tests/components/peblar/test_switch.py b/tests/components/peblar/test_switch.py index 7a8fcf7705b..6436ac78109 100644 --- a/tests/components/peblar/test_switch.py +++ b/tests/components/peblar/test_switch.py @@ -1,18 +1,31 @@ """Tests for the Peblar switch platform.""" 
+from unittest.mock import MagicMock + +from peblar import PeblarAuthenticationError, PeblarConnectionError, PeblarError import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.peblar.const import DOMAIN -from homeassistant.const import Platform +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import MockConfigEntry, snapshot_platform +pytestmark = [ + pytest.mark.parametrize("init_integration", [Platform.SWITCH], indirect=True), + pytest.mark.usefixtures("init_integration"), +] + -@pytest.mark.parametrize("init_integration", [Platform.SWITCH], indirect=True) -@pytest.mark.usefixtures("init_integration") async def test_entities( hass: HomeAssistant, snapshot: SnapshotAssertion, @@ -33,3 +46,138 @@ async def test_entities( ) for entity_entry in entity_entries: assert entity_entry.device_id == device_entry.id + + +@pytest.mark.parametrize( + ("service", "force_single_phase"), + [ + (SERVICE_TURN_ON, True), + (SERVICE_TURN_OFF, False), + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_switch( + hass: HomeAssistant, + mock_peblar: MagicMock, + service: str, + force_single_phase: bool, +) -> None: + """Test the Peblar EV charger switches.""" + entity_id = "switch.peblar_ev_charger_force_single_phase" + mocked_method = mock_peblar.rest_api.return_value.ev_interface + mocked_method.reset_mock() + + # Test normal happy path for changing the switch state + await hass.services.async_call( + SWITCH_DOMAIN, + service, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert len(mocked_method.mock_calls) == 2 
+ mocked_method.mock_calls[0].assert_called_with( + {"force_single_phase": force_single_phase} + ) + + +@pytest.mark.parametrize( + ("error", "error_match", "translation_key", "translation_placeholders"), + [ + ( + PeblarConnectionError("Could not connect"), + ( + r"An error occurred while communicating " + r"with the Peblar device: Could not connect" + ), + "communication_error", + {"error": "Could not connect"}, + ), + ( + PeblarError("Unknown error"), + ( + r"An unknown error occurred while communicating " + r"with the Peblar device: Unknown error" + ), + "unknown_error", + {"error": "Unknown error"}, + ), + ], +) +@pytest.mark.parametrize("service", [SERVICE_TURN_ON, SERVICE_TURN_OFF]) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_switch_communication_error( + hass: HomeAssistant, + mock_peblar: MagicMock, + error: Exception, + error_match: str, + translation_key: str, + translation_placeholders: dict, + service: str, +) -> None: + """Test the Peblar EV charger when a communication error occurs.""" + entity_id = "switch.peblar_ev_charger_force_single_phase" + mock_peblar.rest_api.return_value.ev_interface.side_effect = error + with pytest.raises( + HomeAssistantError, + match=error_match, + ) as excinfo: + await hass.services.async_call( + SWITCH_DOMAIN, + service, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert excinfo.value.translation_domain == DOMAIN + assert excinfo.value.translation_key == translation_key + assert excinfo.value.translation_placeholders == translation_placeholders + + +@pytest.mark.parametrize("service", [SERVICE_TURN_ON, SERVICE_TURN_OFF]) +async def test_switch_authentication_error( + hass: HomeAssistant, + mock_peblar: MagicMock, + mock_config_entry: MockConfigEntry, + service: str, +) -> None: + """Test the Peblar EV charger when an authentication error occurs.""" + entity_id = "switch.peblar_ev_charger_force_single_phase" + mock_peblar.rest_api.return_value.ev_interface.side_effect = ( 
+ PeblarAuthenticationError("Authentication error") + ) + mock_peblar.login.side_effect = PeblarAuthenticationError("Authentication error") + + with pytest.raises( + HomeAssistantError, + match=( + r"An authentication failure occurred while communicating " + r"with the Peblar device" + ), + ) as excinfo: + await hass.services.async_call( + SWITCH_DOMAIN, + service, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert excinfo.value.translation_domain == DOMAIN + assert excinfo.value.translation_key == "authentication_error" + assert not excinfo.value.translation_placeholders + + # Ensure the device is reloaded on authentication error and triggers + # a reauthentication flow. + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow["step_id"] == "reauth_confirm" + assert flow["handler"] == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == mock_config_entry.entry_id From 0f1835139f04d75d6de56fc8b279e41743180e7f Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sun, 22 Dec 2024 19:53:00 +0100 Subject: [PATCH 632/677] Add number error handling for Peblar Rocksolid EV Chargers (#133803) --- homeassistant/components/peblar/number.py | 2 + tests/components/peblar/test_number.py | 148 +++++++++++++++++++++- 2 files changed, 147 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/peblar/number.py b/homeassistant/components/peblar/number.py index d17ff09eb94..d2983438a91 100644 --- a/homeassistant/components/peblar/number.py +++ b/homeassistant/components/peblar/number.py @@ -24,6 +24,7 @@ from .coordinator import ( PeblarRuntimeData, ) from .entity import PeblarEntity +from .helpers import peblar_exception_handler PARALLEL_UPDATES = 1 @@ -94,6 +95,7 @@ class PeblarNumberEntity( """Return the number value.""" 
return self.entity_description.value_fn(self.coordinator.data) + @peblar_exception_handler async def async_set_native_value(self, value: float) -> None: """Change to new number value.""" await self.entity_description.set_value_fn(self.coordinator.api, value) diff --git a/tests/components/peblar/test_number.py b/tests/components/peblar/test_number.py index 4c2ff928210..2a8fca46e91 100644 --- a/tests/components/peblar/test_number.py +++ b/tests/components/peblar/test_number.py @@ -1,18 +1,31 @@ """Tests for the Peblar number platform.""" +from unittest.mock import MagicMock + +from peblar import PeblarAuthenticationError, PeblarConnectionError, PeblarError import pytest from syrupy.assertion import SnapshotAssertion +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) from homeassistant.components.peblar.const import DOMAIN -from homeassistant.const import Platform +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import MockConfigEntry, snapshot_platform +pytestmark = [ + pytest.mark.parametrize("init_integration", [Platform.NUMBER], indirect=True), + pytest.mark.usefixtures("init_integration"), +] + -@pytest.mark.parametrize("init_integration", [Platform.NUMBER], indirect=True) -@pytest.mark.usefixtures("init_integration") async def test_entities( hass: HomeAssistant, snapshot: SnapshotAssertion, @@ -33,3 +46,132 @@ async def test_entities( ) for entity_entry in entity_entries: assert entity_entry.device_id == device_entry.id + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_number_set_value( + hass: HomeAssistant, + mock_peblar: MagicMock, +) -> None: + """Test the Peblar EV charger numbers.""" + 
entity_id = "number.peblar_ev_charger_charge_limit" + mocked_method = mock_peblar.rest_api.return_value.ev_interface + mocked_method.reset_mock() + + # Test normal happy path number value change + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_VALUE: 10, + }, + blocking=True, + ) + + assert len(mocked_method.mock_calls) == 2 + mocked_method.mock_calls[0].assert_called_with({"charge_current_limit": 10}) + + +@pytest.mark.parametrize( + ("error", "error_match", "translation_key", "translation_placeholders"), + [ + ( + PeblarConnectionError("Could not connect"), + ( + r"An error occurred while communicating " + r"with the Peblar device: Could not connect" + ), + "communication_error", + {"error": "Could not connect"}, + ), + ( + PeblarError("Unknown error"), + ( + r"An unknown error occurred while communicating " + r"with the Peblar device: Unknown error" + ), + "unknown_error", + {"error": "Unknown error"}, + ), + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_number_set_value_communication_error( + hass: HomeAssistant, + mock_peblar: MagicMock, + error: Exception, + error_match: str, + translation_key: str, + translation_placeholders: dict, +) -> None: + """Test the Peblar EV charger when a communication error occurs.""" + entity_id = "number.peblar_ev_charger_charge_limit" + mock_peblar.rest_api.return_value.ev_interface.side_effect = error + + with pytest.raises( + HomeAssistantError, + match=error_match, + ) as excinfo: + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_VALUE: 10, + }, + blocking=True, + ) + + assert excinfo.value.translation_domain == DOMAIN + assert excinfo.value.translation_key == translation_key + assert excinfo.value.translation_placeholders == translation_placeholders + + +async def test_number_set_value_authentication_error( + hass: HomeAssistant, + mock_peblar: MagicMock, + 
mock_config_entry: MockConfigEntry, +) -> None: + """Test the Peblar EV charger when an authentication error occurs.""" + entity_id = "number.peblar_ev_charger_charge_limit" + mock_peblar.rest_api.return_value.ev_interface.side_effect = ( + PeblarAuthenticationError("Authentication error") + ) + mock_peblar.login.side_effect = PeblarAuthenticationError("Authentication error") + with pytest.raises( + HomeAssistantError, + match=( + r"An authentication failure occurred while communicating " + r"with the Peblar device" + ), + ) as excinfo: + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_VALUE: 10, + }, + blocking=True, + ) + + assert excinfo.value.translation_domain == DOMAIN + assert excinfo.value.translation_key == "authentication_error" + assert not excinfo.value.translation_placeholders + + # Ensure the device is reloaded on authentication error and triggers + # a reauthentication flow. + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow["step_id"] == "reauth_confirm" + assert flow["handler"] == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == mock_config_entry.entry_id From 6c70586f7e82d3d40d92881acd17e51f5b57f129 Mon Sep 17 00:00:00 2001 From: Raphael Hehl <7577984+RaHehl@users.noreply.github.com> Date: Sun, 22 Dec 2024 19:54:14 +0100 Subject: [PATCH 633/677] Add get_user_keyring_info service to UniFi Protect integration (#133138) Co-authored-by: J. 
Nick Koston --- .../components/unifiprotect/const.py | 7 ++ .../components/unifiprotect/icons.json | 3 + .../components/unifiprotect/services.py | 81 ++++++++++++++++++- .../components/unifiprotect/services.yaml | 7 ++ .../components/unifiprotect/strings.json | 10 +++ .../components/unifiprotect/test_services.py | 68 +++++++++++++++- 6 files changed, 171 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/unifiprotect/const.py b/homeassistant/components/unifiprotect/const.py index d607f87b76a..d041b713125 100644 --- a/homeassistant/components/unifiprotect/const.py +++ b/homeassistant/components/unifiprotect/const.py @@ -83,3 +83,10 @@ EVENT_TYPE_FINGERPRINT_IDENTIFIED: Final = "identified" EVENT_TYPE_FINGERPRINT_NOT_IDENTIFIED: Final = "not_identified" EVENT_TYPE_NFC_SCANNED: Final = "scanned" EVENT_TYPE_DOORBELL_RING: Final = "ring" + +KEYRINGS_ULP_ID: Final = "ulp_id" +KEYRINGS_USER_STATUS: Final = "user_status" +KEYRINGS_USER_FULL_NAME: Final = "full_name" +KEYRINGS_KEY_TYPE: Final = "key_type" +KEYRINGS_KEY_TYPE_ID_FINGERPRINT: Final = "fingerprint_id" +KEYRINGS_KEY_TYPE_ID_NFC: Final = "nfc_id" diff --git a/homeassistant/components/unifiprotect/icons.json b/homeassistant/components/unifiprotect/icons.json index 5e80e3095b3..b5e8277d82a 100644 --- a/homeassistant/components/unifiprotect/icons.json +++ b/homeassistant/components/unifiprotect/icons.json @@ -11,6 +11,9 @@ }, "remove_privacy_zone": { "service": "mdi:eye-minus" + }, + "get_user_keyring_info": { + "service": "mdi:key-chain" } } } diff --git a/homeassistant/components/unifiprotect/services.py b/homeassistant/components/unifiprotect/services.py index 35713efdf3d..6a1daef178e 100644 --- a/homeassistant/components/unifiprotect/services.py +++ b/homeassistant/components/unifiprotect/services.py @@ -13,7 +13,13 @@ import voluptuous as vol from homeassistant.components.binary_sensor import BinarySensorDeviceClass from homeassistant.const import ATTR_DEVICE_ID, ATTR_NAME, Platform -from 
homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, + callback, +) from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import ( config_validation as cv, @@ -21,9 +27,19 @@ from homeassistant.helpers import ( entity_registry as er, ) from homeassistant.helpers.service import async_extract_referenced_entity_ids +from homeassistant.util.json import JsonValueType from homeassistant.util.read_only_dict import ReadOnlyDict -from .const import ATTR_MESSAGE, DOMAIN +from .const import ( + ATTR_MESSAGE, + DOMAIN, + KEYRINGS_KEY_TYPE, + KEYRINGS_KEY_TYPE_ID_FINGERPRINT, + KEYRINGS_KEY_TYPE_ID_NFC, + KEYRINGS_ULP_ID, + KEYRINGS_USER_FULL_NAME, + KEYRINGS_USER_STATUS, +) from .data import async_ufp_instance_for_config_entry_ids SERVICE_ADD_DOORBELL_TEXT = "add_doorbell_text" @@ -31,12 +47,14 @@ SERVICE_REMOVE_DOORBELL_TEXT = "remove_doorbell_text" SERVICE_SET_PRIVACY_ZONE = "set_privacy_zone" SERVICE_REMOVE_PRIVACY_ZONE = "remove_privacy_zone" SERVICE_SET_CHIME_PAIRED = "set_chime_paired_doorbells" +SERVICE_GET_USER_KEYRING_INFO = "get_user_keyring_info" ALL_GLOBAL_SERIVCES = [ SERVICE_ADD_DOORBELL_TEXT, SERVICE_REMOVE_DOORBELL_TEXT, SERVICE_SET_CHIME_PAIRED, SERVICE_REMOVE_PRIVACY_ZONE, + SERVICE_GET_USER_KEYRING_INFO, ] DOORBELL_TEXT_SCHEMA = vol.All( @@ -69,6 +87,15 @@ REMOVE_PRIVACY_ZONE_SCHEMA = vol.All( cv.has_at_least_one_key(ATTR_DEVICE_ID), ) +GET_USER_KEYRING_INFO_SCHEMA = vol.All( + vol.Schema( + { + **cv.ENTITY_SERVICE_FIELDS, + }, + ), + cv.has_at_least_one_key(ATTR_DEVICE_ID), +) + @callback def _async_get_ufp_instance(hass: HomeAssistant, device_id: str) -> ProtectApiClient: @@ -205,26 +232,70 @@ async def set_chime_paired_doorbells(call: ServiceCall) -> None: await chime.save_device(data_before_changed) +async def get_user_keyring_info(call: ServiceCall) -> ServiceResponse: + """Get the user 
keyring info.""" + camera = _async_get_ufp_camera(call) + ulp_users = camera.api.bootstrap.ulp_users.as_list() + user_keyrings: list[JsonValueType] = [ + { + KEYRINGS_USER_FULL_NAME: user.full_name, + KEYRINGS_USER_STATUS: user.status, + KEYRINGS_ULP_ID: user.ulp_id, + "keys": [ + { + KEYRINGS_KEY_TYPE: key.registry_type, + **( + {KEYRINGS_KEY_TYPE_ID_FINGERPRINT: key.registry_id} + if key.registry_type == "fingerprint" + else {} + ), + **( + {KEYRINGS_KEY_TYPE_ID_NFC: key.registry_id} + if key.registry_type == "nfc" + else {} + ), + } + for key in camera.api.bootstrap.keyrings.as_list() + if key.ulp_user == user.ulp_id + ], + } + for user in ulp_users + ] + + response: ServiceResponse = {"users": user_keyrings} + return response + + SERVICES = [ ( SERVICE_ADD_DOORBELL_TEXT, add_doorbell_text, DOORBELL_TEXT_SCHEMA, + SupportsResponse.NONE, ), ( SERVICE_REMOVE_DOORBELL_TEXT, remove_doorbell_text, DOORBELL_TEXT_SCHEMA, + SupportsResponse.NONE, ), ( SERVICE_SET_CHIME_PAIRED, set_chime_paired_doorbells, CHIME_PAIRED_SCHEMA, + SupportsResponse.NONE, ), ( SERVICE_REMOVE_PRIVACY_ZONE, remove_privacy_zone, REMOVE_PRIVACY_ZONE_SCHEMA, + SupportsResponse.NONE, + ), + ( + SERVICE_GET_USER_KEYRING_INFO, + get_user_keyring_info, + GET_USER_KEYRING_INFO_SCHEMA, + SupportsResponse.ONLY, ), ] @@ -232,5 +303,7 @@ SERVICES = [ def async_setup_services(hass: HomeAssistant) -> None: """Set up the global UniFi Protect services.""" - for name, method, schema in SERVICES: - hass.services.async_register(DOMAIN, name, method, schema=schema) + for name, method, schema, supports_response in SERVICES: + hass.services.async_register( + DOMAIN, name, method, schema=schema, supports_response=supports_response + ) diff --git a/homeassistant/components/unifiprotect/services.yaml b/homeassistant/components/unifiprotect/services.yaml index 192dfd0566f..b620c195fc2 100644 --- a/homeassistant/components/unifiprotect/services.yaml +++ b/homeassistant/components/unifiprotect/services.yaml @@ -53,3 
+53,10 @@ remove_privacy_zone: required: true selector: text: +get_user_keyring_info: + fields: + device_id: + required: true + selector: + device: + integration: unifiprotect diff --git a/homeassistant/components/unifiprotect/strings.json b/homeassistant/components/unifiprotect/strings.json index 8ecb4076409..cde8c88d169 100644 --- a/homeassistant/components/unifiprotect/strings.json +++ b/homeassistant/components/unifiprotect/strings.json @@ -225,6 +225,16 @@ "description": "The name of the zone to remove." } } + }, + "get_user_keyring_info": { + "name": "Retrieve Keyring Details for Users", + "description": "Fetch a detailed list of users with NFC and fingerprint associations for automations.", + "fields": { + "device_id": { + "name": "UniFi Protect NVR", + "description": "Any device from the UniFi Protect instance you want to retrieve keyring details. This is useful for systems with multiple Protect instances." + } + } } } } diff --git a/tests/components/unifiprotect/test_services.py b/tests/components/unifiprotect/test_services.py index 84e0e74a492..efc9d1ace9e 100644 --- a/tests/components/unifiprotect/test_services.py +++ b/tests/components/unifiprotect/test_services.py @@ -9,9 +9,19 @@ from uiprotect.data import Camera, Chime, Color, Light, ModelType from uiprotect.data.devices import CameraZone from uiprotect.exceptions import BadRequest -from homeassistant.components.unifiprotect.const import ATTR_MESSAGE, DOMAIN +from homeassistant.components.unifiprotect.const import ( + ATTR_MESSAGE, + DOMAIN, + KEYRINGS_KEY_TYPE, + KEYRINGS_KEY_TYPE_ID_FINGERPRINT, + KEYRINGS_KEY_TYPE_ID_NFC, + KEYRINGS_ULP_ID, + KEYRINGS_USER_FULL_NAME, + KEYRINGS_USER_STATUS, +) from homeassistant.components.unifiprotect.services import ( SERVICE_ADD_DOORBELL_TEXT, + SERVICE_GET_USER_KEYRING_INFO, SERVICE_REMOVE_DOORBELL_TEXT, SERVICE_REMOVE_PRIVACY_ZONE, SERVICE_SET_CHIME_PAIRED, @@ -249,3 +259,59 @@ async def test_remove_privacy_zone( ) ufp.api.update_device.assert_called() assert 
not doorbell.privacy_zones + + +@pytest.mark.asyncio +async def test_get_doorbell_user( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + ufp: MockUFPFixture, + doorbell: Camera, +) -> None: + """Test get_doorbell_user service.""" + + ulp_user = Mock(full_name="Test User", status="active", ulp_id="user_ulp_id") + keyring = Mock( + registry_type="nfc", + registry_id="123456", + ulp_user="user_ulp_id", + ) + keyring_2 = Mock( + registry_type="fingerprint", + registry_id="2", + ulp_user="user_ulp_id", + ) + ufp.api.bootstrap.ulp_users.as_list = Mock(return_value=[ulp_user]) + ufp.api.bootstrap.keyrings.as_list = Mock(return_value=[keyring, keyring_2]) + + await init_entry(hass, ufp, [doorbell]) + + camera_entry = entity_registry.async_get("binary_sensor.test_camera_doorbell") + + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_USER_KEYRING_INFO, + {ATTR_DEVICE_ID: camera_entry.device_id}, + blocking=True, + return_response=True, + ) + + assert response == { + "users": [ + { + KEYRINGS_USER_FULL_NAME: "Test User", + "keys": [ + { + KEYRINGS_KEY_TYPE: "nfc", + KEYRINGS_KEY_TYPE_ID_NFC: "123456", + }, + { + KEYRINGS_KEY_TYPE_ID_FINGERPRINT: "2", + KEYRINGS_KEY_TYPE: "fingerprint", + }, + ], + KEYRINGS_USER_STATUS: "active", + KEYRINGS_ULP_ID: "user_ulp_id", + }, + ], + } From 344a03d9ce060e240552fc358912e0d3b6e0642c Mon Sep 17 00:00:00 2001 From: Andre Lengwenus Date: Sun, 22 Dec 2024 19:55:45 +0100 Subject: [PATCH 634/677] Remove unused fixture from LCN tests (#133821) --- tests/components/lcn/conftest.py | 10 -- tests/components/lcn/fixtures/config.json | 165 ---------------------- 2 files changed, 175 deletions(-) delete mode 100644 tests/components/lcn/fixtures/config.json diff --git a/tests/components/lcn/conftest.py b/tests/components/lcn/conftest.py index 3c5979c3c36..d8dee472946 100644 --- a/tests/components/lcn/conftest.py +++ b/tests/components/lcn/conftest.py @@ -16,7 +16,6 @@ from homeassistant.components.lcn.helpers import 
AddressType, generate_unique_id from homeassistant.const import CONF_ADDRESS, CONF_DEVICES, CONF_ENTITIES, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_fixture @@ -134,15 +133,6 @@ async def init_integration( return lcn_connection -async def setup_component(hass: HomeAssistant) -> None: - """Set up the LCN component.""" - fixture_filename = "lcn/config.json" - config_data = json.loads(load_fixture(fixture_filename)) - - await async_setup_component(hass, DOMAIN, config_data) - await hass.async_block_till_done() - - def get_device( hass: HomeAssistant, entry: MockConfigEntry, address: AddressType ) -> dr.DeviceEntry: diff --git a/tests/components/lcn/fixtures/config.json b/tests/components/lcn/fixtures/config.json deleted file mode 100644 index ed3e3500900..00000000000 --- a/tests/components/lcn/fixtures/config.json +++ /dev/null @@ -1,165 +0,0 @@ -{ - "lcn": { - "connections": [ - { - "host": "192.168.2.41", - "port": 4114, - "username": "lcn", - "password": "lcn", - "sk_num_tries": 0, - "dim_mode": "steps200", - "name": "pchk" - }, - { - "name": "myhome", - "host": "192.168.2.42", - "port": 4114, - "username": "lcn", - "password": "lcn", - "sk_num_tries": 0, - "dim_mode": "steps200" - } - ], - "lights": [ - { - "name": "Light_Output1", - "address": "pchk.s0.m7", - "output": "output1", - "dimmable": true, - "transition": 5 - }, - { - "name": "Light_Output2", - "address": "pchk.s0.m7", - "output": "output2", - "dimmable": false, - "transition": 0 - }, - { - "name": "Light_Relay1", - "address": "s0.m7", - "output": "relay1" - }, - { - "name": "Light_Relay3", - "address": "myhome.s0.m7", - "output": "relay3" - }, - { - "name": "Light_Relay4", - "address": "myhome.s0.m7", - "output": "relay4" - } - ], - "switches": [ - { - "name": "Switch_Output1", - "address": "s0.m7", - "output": "output1" - }, - { - 
"name": "Switch_Output2", - "address": "s0.m7", - "output": "output2" - }, - { - "name": "Switch_Relay1", - "address": "s0.m7", - "output": "relay1" - }, - { - "name": "Switch_Relay2", - "address": "s0.m7", - "output": "relay2" - }, - { - "name": "Switch_Group5", - "address": "s0.g5", - "output": "relay1" - } - ], - "covers": [ - { - "name": "Cover_Ouputs", - "address": "s0.m7", - "motor": "outputs", - "reverse_time": "rt1200" - }, - { - "name": "Cover_Relays", - "address": "s0.m7", - "motor": "motor1" - } - ], - "climates": [ - { - "name": "Climate1", - "address": "s0.m7", - "source": "var1", - "setpoint": "r1varsetpoint", - "lockable": true, - "min_temp": 0, - "max_temp": 40, - "unit_of_measurement": "°C" - } - ], - "scenes": [ - { - "name": "Romantic", - "address": "s0.m7", - "register": 0, - "scene": 0, - "outputs": ["output1", "output2", "relay1"] - }, - { - "name": "Romantic Transition", - "address": "s0.m7", - "register": 0, - "scene": 1, - "outputs": ["output1", "output2", "relay1"], - "transition": 10 - } - ], - "binary_sensors": [ - { - "name": "Sensor_LockRegulator1", - "address": "s0.m7", - "source": "r1varsetpoint" - }, - { - "name": "Binary_Sensor1", - "address": "s0.m7", - "source": "binsensor1" - }, - { - "name": "Sensor_KeyLock", - "address": "s0.m7", - "source": "a5" - } - ], - "sensors": [ - { - "name": "Sensor_Var1", - "address": "s0.m7", - "source": "var1", - "unit_of_measurement": "°C" - }, - { - "name": "Sensor_Setpoint1", - "address": "s0.m7", - "source": "r1varsetpoint", - "unit_of_measurement": "°C" - }, - { - "name": "Sensor_Led6", - "address": "s0.m7", - "source": "led6" - }, - { - "name": "Sensor_LogicOp1", - "address": "s0.m7", - "source": "logicop1" - } - ] - } -} From 475f19c1409f0e07e40a88b27e5545cf8e9d90a1 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 22 Dec 2024 08:56:09 -1000 Subject: [PATCH 635/677] Bump flux_led to 1.1.0 (#133818) --- homeassistant/components/flux_led/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/flux_led/manifest.json b/homeassistant/components/flux_led/manifest.json index a55ae028342..962098a0bf8 100644 --- a/homeassistant/components/flux_led/manifest.json +++ b/homeassistant/components/flux_led/manifest.json @@ -53,5 +53,5 @@ "documentation": "https://www.home-assistant.io/integrations/flux_led", "iot_class": "local_push", "loggers": ["flux_led"], - "requirements": ["flux-led==1.0.4"] + "requirements": ["flux-led==1.1.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 2381e18a42d..321f104fa1f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -924,7 +924,7 @@ flexit_bacnet==2.2.1 flipr-api==1.6.1 # homeassistant.components.flux_led -flux-led==1.0.4 +flux-led==1.1.0 # homeassistant.components.homekit # homeassistant.components.recorder diff --git a/requirements_test_all.txt b/requirements_test_all.txt index db166894f00..58004e5bd8b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -783,7 +783,7 @@ flexit_bacnet==2.2.1 flipr-api==1.6.1 # homeassistant.components.flux_led -flux-led==1.0.4 +flux-led==1.1.0 # homeassistant.components.homekit # homeassistant.components.recorder From b1f6563fb2c3bfe6d75a4c2b26af102b6b7069ae Mon Sep 17 00:00:00 2001 From: "Steven B." <51370195+sdb9696@users.noreply.github.com> Date: Sun, 22 Dec 2024 18:56:33 +0000 Subject: [PATCH 636/677] Add camera platform to tplink integration (#129180) Co-authored-by: Teemu R. 
--- homeassistant/components/tplink/__init__.py | 13 +- homeassistant/components/tplink/camera.py | 220 +++++ .../components/tplink/config_flow.py | 257 ++++- homeassistant/components/tplink/const.py | 3 + homeassistant/components/tplink/entity.py | 14 + homeassistant/components/tplink/manifest.json | 2 +- homeassistant/components/tplink/models.py | 4 + homeassistant/components/tplink/strings.json | 27 +- tests/components/tplink/__init__.py | 71 +- tests/components/tplink/conftest.py | 94 +- .../components/tplink/fixtures/features.json | 30 + .../tplink/snapshots/test_camera.ambr | 87 ++ tests/components/tplink/test_camera.py | 431 +++++++++ tests/components/tplink/test_config_flow.py | 899 ++++++++++++++++-- tests/components/tplink/test_init.py | 10 +- 15 files changed, 2012 insertions(+), 150 deletions(-) create mode 100644 homeassistant/components/tplink/camera.py create mode 100644 tests/components/tplink/snapshots/test_camera.ambr create mode 100644 tests/components/tplink/test_camera.py diff --git a/homeassistant/components/tplink/__init__.py b/homeassistant/components/tplink/__init__.py index a7ffce686be..e2a2f99517f 100644 --- a/homeassistant/components/tplink/__init__.py +++ b/homeassistant/components/tplink/__init__.py @@ -47,10 +47,12 @@ from homeassistant.helpers.typing import ConfigType from .const import ( CONF_AES_KEYS, + CONF_CAMERA_CREDENTIALS, CONF_CONFIG_ENTRY_MINOR_VERSION, CONF_CONNECTION_PARAMETERS, CONF_CREDENTIALS_HASH, CONF_DEVICE_CONFIG, + CONF_LIVE_VIEW, CONF_USES_HTTP, CONNECT_TIMEOUT, DISCOVERY_TIMEOUT, @@ -226,7 +228,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: TPLinkConfigEntry) -> bo for child in device.children ] - entry.runtime_data = TPLinkData(parent_coordinator, child_coordinators) + camera_creds: Credentials | None = None + if camera_creds_dict := entry.data.get(CONF_CAMERA_CREDENTIALS): + camera_creds = Credentials( + camera_creds_dict[CONF_USERNAME], camera_creds_dict[CONF_PASSWORD] + ) + live_view = 
entry.data.get(CONF_LIVE_VIEW) + + entry.runtime_data = TPLinkData( + parent_coordinator, child_coordinators, camera_creds, live_view + ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/tplink/camera.py b/homeassistant/components/tplink/camera.py new file mode 100644 index 00000000000..5ed279909d6 --- /dev/null +++ b/homeassistant/components/tplink/camera.py @@ -0,0 +1,220 @@ +"""Support for TPLink camera entities.""" + +import asyncio +from dataclasses import dataclass +import logging +import time + +from aiohttp import web +from haffmpeg.camera import CameraMjpeg +from kasa import Credentials, Device, Module +from kasa.smartcam.modules import Camera as CameraModule + +from homeassistant.components import ffmpeg, stream +from homeassistant.components.camera import ( + Camera, + CameraEntityDescription, + CameraEntityFeature, +) +from homeassistant.config_entries import ConfigFlowContext +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import TPLinkConfigEntry, legacy_device_id +from .const import CONF_CAMERA_CREDENTIALS +from .coordinator import TPLinkDataUpdateCoordinator +from .entity import CoordinatedTPLinkEntity, TPLinkModuleEntityDescription + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(frozen=True, kw_only=True) +class TPLinkCameraEntityDescription( + CameraEntityDescription, TPLinkModuleEntityDescription +): + """Base class for camera entity description.""" + + +CAMERA_DESCRIPTIONS: tuple[TPLinkCameraEntityDescription, ...] 
= ( + TPLinkCameraEntityDescription( + key="live_view", + translation_key="live_view", + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: TPLinkConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up camera entities.""" + data = config_entry.runtime_data + parent_coordinator = data.parent_coordinator + device = parent_coordinator.device + camera_credentials = data.camera_credentials + live_view = data.live_view + ffmpeg_manager = ffmpeg.get_ffmpeg_manager(hass) + + async_add_entities( + TPLinkCameraEntity( + device, + parent_coordinator, + description, + camera_module=camera_module, + parent=None, + ffmpeg_manager=ffmpeg_manager, + camera_credentials=camera_credentials, + ) + for description in CAMERA_DESCRIPTIONS + if (camera_module := device.modules.get(Module.Camera)) and live_view + ) + + +class TPLinkCameraEntity(CoordinatedTPLinkEntity, Camera): + """Representation of a TPLink camera.""" + + IMAGE_INTERVAL = 5 * 60 + + _attr_supported_features = CameraEntityFeature.STREAM | CameraEntityFeature.ON_OFF + + entity_description: TPLinkCameraEntityDescription + + def __init__( + self, + device: Device, + coordinator: TPLinkDataUpdateCoordinator, + description: TPLinkCameraEntityDescription, + *, + camera_module: CameraModule, + parent: Device | None = None, + ffmpeg_manager: ffmpeg.FFmpegManager, + camera_credentials: Credentials | None, + ) -> None: + """Initialize a TPlink camera.""" + self.entity_description = description + self._camera_module = camera_module + self._video_url = camera_module.stream_rtsp_url(camera_credentials) + self._image: bytes | None = None + super().__init__(device, coordinator, parent=parent) + Camera.__init__(self) + self._ffmpeg_manager = ffmpeg_manager + self._image_lock = asyncio.Lock() + self._last_update: float = 0 + self._camera_credentials = camera_credentials + self._can_stream = True + self._http_mpeg_stream_running = False + + def _get_unique_id(self) -> str: + """Return 
unique ID for the entity.""" + return f"{legacy_device_id(self._device)}-{self.entity_description}" + + @callback + def _async_update_attrs(self) -> None: + """Update the entity's attributes.""" + self._attr_is_on = self._camera_module.is_on + + async def stream_source(self) -> str | None: + """Return the source of the stream.""" + return self._video_url + + async def _async_check_stream_auth(self, video_url: str) -> None: + """Check for an auth error and start reauth flow.""" + try: + await stream.async_check_stream_client_error(self.hass, video_url) + except stream.StreamOpenClientError as ex: + if ex.stream_client_error is stream.StreamClientError.Unauthorized: + _LOGGER.debug( + "Camera stream failed authentication for %s", + self._device.host, + ) + self._can_stream = False + self.coordinator.config_entry.async_start_reauth( + self.hass, + ConfigFlowContext( + reauth_source=CONF_CAMERA_CREDENTIALS, # type: ignore[typeddict-unknown-key] + ), + {"device": self._device}, + ) + + async def async_camera_image( + self, width: int | None = None, height: int | None = None + ) -> bytes | None: + """Return a still image response from the camera.""" + now = time.monotonic() + + if self._image and now - self._last_update < self.IMAGE_INTERVAL: + return self._image + + # Don't try to capture a new image if a stream is running + if (self.stream and self.stream.available) or self._http_mpeg_stream_running: + return self._image + + if self._can_stream and (video_url := self._video_url): + # Sometimes the front end makes multiple image requests + async with self._image_lock: + if self._image and (now - self._last_update) < self.IMAGE_INTERVAL: + return self._image + + _LOGGER.debug("Updating camera image for %s", self._device.host) + image = await ffmpeg.async_get_image( + self.hass, + video_url, + width=width, + height=height, + ) + if image: + self._image = image + self._last_update = now + _LOGGER.debug("Updated camera image for %s", self._device.host) + # This coroutine is 
called by camera with an asyncio.timeout + # so image could be None whereas an auth issue returns b'' + elif image == b"": + _LOGGER.debug( + "Empty camera image returned for %s", self._device.host + ) + # image could be empty if a stream is running so check for explicit auth error + await self._async_check_stream_auth(video_url) + else: + _LOGGER.debug( + "None camera image returned for %s", self._device.host + ) + + return self._image + + async def handle_async_mjpeg_stream( + self, request: web.Request + ) -> web.StreamResponse | None: + """Generate an HTTP MJPEG stream from the camera. + + The frontend falls back to calling this method if the HLS + stream fails. + """ + _LOGGER.debug("Starting http mjpeg stream for %s", self._device.host) + if self._video_url is None or self._can_stream is False: + return None + + mjpeg_stream = CameraMjpeg(self._ffmpeg_manager.binary) + await mjpeg_stream.open_camera(self._video_url) + self._http_mpeg_stream_running = True + try: + stream_reader = await mjpeg_stream.get_reader() + return await async_aiohttp_proxy_stream( + self.hass, + request, + stream_reader, + self._ffmpeg_manager.ffmpeg_stream_content_type, + ) + finally: + self._http_mpeg_stream_running = False + await mjpeg_stream.close() + _LOGGER.debug("Stopped http mjpeg stream for %s", self._device.host) + + async def async_turn_on(self) -> None: + """Turn on camera.""" + await self._camera_module.set_state(True) + + async def async_turn_off(self) -> None: + """Turn off camera.""" + await self._camera_module.set_state(False) diff --git a/homeassistant/components/tplink/config_flow.py b/homeassistant/components/tplink/config_flow.py index 63f1b4e125b..db6f9a58ba5 100644 --- a/homeassistant/components/tplink/config_flow.py +++ b/homeassistant/components/tplink/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Mapping import logging -from typing import TYPE_CHECKING, Any, Self +from typing import TYPE_CHECKING, Any, Self, cast 
from kasa import ( AuthenticationError, @@ -13,13 +13,15 @@ from kasa import ( DeviceConfig, Discover, KasaException, + Module, TimeoutError, ) import voluptuous as vol -from homeassistant.components import dhcp +from homeassistant.components import dhcp, ffmpeg, stream from homeassistant.config_entries import ( SOURCE_REAUTH, + SOURCE_RECONFIGURE, ConfigEntry, ConfigEntryState, ConfigFlow, @@ -31,6 +33,7 @@ from homeassistant.const import ( CONF_HOST, CONF_MAC, CONF_MODEL, + CONF_NAME, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, @@ -48,9 +51,11 @@ from . import ( ) from .const import ( CONF_AES_KEYS, + CONF_CAMERA_CREDENTIALS, CONF_CONFIG_ENTRY_MINOR_VERSION, CONF_CONNECTION_PARAMETERS, CONF_CREDENTIALS_HASH, + CONF_LIVE_VIEW, CONF_USES_HTTP, CONNECT_TIMEOUT, DOMAIN, @@ -62,6 +67,16 @@ STEP_AUTH_DATA_SCHEMA = vol.Schema( {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} ) +STEP_RECONFIGURE_DATA_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str}) + +STEP_CAMERA_AUTH_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_LIVE_VIEW): bool, + vol.Optional(CONF_USERNAME): str, + vol.Optional(CONF_PASSWORD): str, + } +) + class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for tplink.""" @@ -227,7 +242,12 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): self.hass.async_create_task( self._async_reload_requires_auth_entries(), eager_start=False ) - return self._async_create_entry_from_device(self._discovered_device) + if self._async_supports_camera_credentials(device): + return await self.async_step_camera_auth_confirm() + + return self._async_create_or_update_entry_from_device( + self._discovered_device + ) self.context["title_placeholders"] = placeholders return self.async_show_form( @@ -253,7 +273,12 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): """Confirm discovery.""" assert self._discovered_device is not None if user_input is not None: - return self._async_create_entry_from_device(self._discovered_device) + if 
self._async_supports_camera_credentials(self._discovered_device): + return await self.async_step_camera_auth_confirm() + + return self._async_create_or_update_entry_from_device( + self._discovered_device + ) self._set_confirm_only() placeholders = self._async_make_placeholders_from_discovery() @@ -282,6 +307,13 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): return host, port + def _async_supports_camera_credentials(self, device: Device) -> bool: + """Return True if device could have separate camera credentials.""" + if camera_module := device.modules.get(Module.Camera): + self._discovered_device = device + return bool(camera_module.stream_rtsp_url()) + return False + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -324,7 +356,11 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): else: if not device: return await self.async_step_user_auth_confirm() - return self._async_create_entry_from_device(device) + + if self._async_supports_camera_credentials(device): + return await self.async_step_camera_auth_confirm() + + return self._async_create_or_update_entry_from_device(device) return self.async_show_form( step_id="user", @@ -375,7 +411,10 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): self.hass.async_create_task( self._async_reload_requires_auth_entries(), eager_start=False ) - return self._async_create_entry_from_device(device) + if self._async_supports_camera_credentials(device): + return await self.async_step_camera_auth_confirm() + + return self._async_create_or_update_entry_from_device(device) return self.async_show_form( step_id="user_auth_confirm", @@ -384,6 +423,104 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): description_placeholders=placeholders, ) + def _create_camera_entry( + self, device: Device, un: str, pw: str + ) -> ConfigFlowResult: + entry_data: dict[str, bool | dict[str, str]] = {CONF_LIVE_VIEW: True} + entry_data[CONF_CAMERA_CREDENTIALS] = { + CONF_USERNAME: un, + 
CONF_PASSWORD: pw, + } + _LOGGER.debug("Creating camera account entry for device %s", device.host) + return self._async_create_or_update_entry_from_device( + device, camera_data=entry_data + ) + + async def async_step_camera_auth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Dialog that gives the user option to set camera credentials.""" + errors: dict[str, str] = {} + placeholders: dict[str, str] = {} + device = self._discovered_device + assert device + + if user_input: + live_view = user_input[CONF_LIVE_VIEW] + if not live_view: + return self._async_create_or_update_entry_from_device( + device, camera_data={CONF_LIVE_VIEW: False} + ) + + un = user_input.get(CONF_USERNAME) + pw = user_input.get(CONF_PASSWORD) + + if user_input and un and pw: + camera_creds = Credentials(un, cast(str, pw)) + + camera_module = device.modules[Module.Camera] + rtsp_url = camera_module.stream_rtsp_url(camera_creds) + assert rtsp_url + + # If camera fails to create HLS stream via 'stream' then try + # ffmpeg.async_get_image as some cameras do not work with HLS + # and the frontend will fallback to mpeg on error + try: + await stream.async_check_stream_client_error(self.hass, rtsp_url) + except stream.StreamOpenClientError as ex: + if ex.stream_client_error is stream.StreamClientError.Unauthorized: + errors["base"] = "invalid_camera_auth" + else: + _LOGGER.debug( + "Device %s client error checking stream: %s", device.host, ex + ) + if await ffmpeg.async_get_image(self.hass, rtsp_url): + return self._create_camera_entry(device, un, pw) + + errors["base"] = "cannot_connect_camera" + placeholders["error"] = str(ex) + except Exception as ex: # noqa: BLE001 + _LOGGER.debug("Device %s error checking stream: %s", device.host, ex) + if await ffmpeg.async_get_image(self.hass, rtsp_url): + return self._create_camera_entry(device, un, pw) + + errors["base"] = "cannot_connect_camera" + placeholders["error"] = str(ex) + else: + return 
self._create_camera_entry(device, un, pw) + + elif user_input: + errors["base"] = "camera_creds" + + entry = None + if self.source == SOURCE_RECONFIGURE: + entry = self._get_reconfigure_entry() + elif self.source == SOURCE_REAUTH: + entry = self._get_reauth_entry() + + if entry: + placeholders[CONF_NAME] = entry.data[CONF_ALIAS] + placeholders[CONF_MODEL] = entry.data[CONF_MODEL] + placeholders[CONF_HOST] = entry.data[CONF_HOST] + + if user_input: + form_data = {**user_input} + elif entry: + form_data = {**entry.data.get(CONF_CAMERA_CREDENTIALS, {})} + form_data[CONF_LIVE_VIEW] = entry.data.get(CONF_LIVE_VIEW, False) + else: + form_data = {} + + self.context["title_placeholders"] = placeholders + return self.async_show_form( + step_id="camera_auth_confirm", + data_schema=self.add_suggested_values_to_schema( + STEP_CAMERA_AUTH_DATA_SCHEMA, form_data + ), + errors=errors, + description_placeholders=placeholders, + ) + async def async_step_pick_device( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -403,7 +540,11 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_user_auth_confirm() except KasaException: return self.async_abort(reason="cannot_connect") - return self._async_create_entry_from_device(device) + + if self._async_supports_camera_credentials(device): + return await self.async_step_camera_auth_confirm() + + return self._async_create_or_update_entry_from_device(device) configured_devices = { entry.unique_id for entry in self._async_current_entries() @@ -444,11 +585,19 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): _config_entries.flow.async_abort(flow["flow_id"]) @callback - def _async_create_entry_from_device(self, device: Device) -> ConfigFlowResult: + def _async_create_or_update_entry_from_device( + self, device: Device, *, camera_data: dict | None = None + ) -> ConfigFlowResult: """Create a config entry from a smart device.""" - # This is only ever called after a successful device update so 
we know that - # the credential_hash is correct and should be saved. - self._abort_if_unique_id_configured(updates={CONF_HOST: device.host}) + entry = None + if self.source == SOURCE_RECONFIGURE: + entry = self._get_reconfigure_entry() + elif self.source == SOURCE_REAUTH: + entry = self._get_reauth_entry() + + if not entry: + self._abort_if_unique_id_configured(updates={CONF_HOST: device.host}) + data: dict[str, Any] = { CONF_HOST: device.host, CONF_ALIAS: device.alias, @@ -456,16 +605,28 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): CONF_CONNECTION_PARAMETERS: device.config.connection_type.to_dict(), CONF_USES_HTTP: device.config.uses_http, } + if camera_data is not None: + data[CONF_LIVE_VIEW] = camera_data[CONF_LIVE_VIEW] + if camera_creds := camera_data.get(CONF_CAMERA_CREDENTIALS): + data[CONF_CAMERA_CREDENTIALS] = camera_creds + if device.config.aes_keys: data[CONF_AES_KEYS] = device.config.aes_keys + + # This is only ever called after a successful device update so we know that + # the credential_hash is correct and should be saved. 
if device.credentials_hash: data[CONF_CREDENTIALS_HASH] = device.credentials_hash if port := device.config.port_override: data[CONF_PORT] = port - return self.async_create_entry( - title=f"{device.alias} {device.model}", - data=data, - ) + + if not entry: + return self.async_create_entry( + title=f"{device.alias} {device.model}", + data=data, + ) + + return self.async_update_reload_and_abort(entry, data=data) async def _async_try_connect_all( self, @@ -546,7 +707,8 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): credentials: Credentials | None, ) -> Device: """Try to connect.""" - self._async_abort_entries_match({CONF_HOST: discovered_device.host}) + if self.source not in {SOURCE_RECONFIGURE, SOURCE_REAUTH}: + self._async_abort_entries_match({CONF_HOST: discovered_device.host}) config = discovered_device.config if credentials: @@ -566,6 +728,10 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Start the reauthentication flow if the device needs updated credentials.""" + if self.context.get("reauth_source") == CONF_CAMERA_CREDENTIALS: + self._discovered_device = entry_data["device"] + return await self.async_step_camera_auth_confirm() + return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -634,3 +800,62 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, description_placeholders=placeholders, ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Trigger a reconfiguration flow.""" + errors: dict[str, str] = {} + placeholders: dict[str, str] = {} + + reconfigure_entry = self._get_reconfigure_entry() + assert reconfigure_entry.unique_id + await self.async_set_unique_id(reconfigure_entry.unique_id) + + host = reconfigure_entry.data[CONF_HOST] + port = reconfigure_entry.data.get(CONF_PORT) + + if user_input is not None: + host, port = self._async_get_host_port(host) + + self.host = host 
+ credentials = await get_credentials(self.hass) + try: + device = await self._async_try_discover_and_update( + host, + credentials, + raise_on_progress=False, + raise_on_timeout=False, + port=port, + ) or await self._async_try_connect_all( + host, + credentials=credentials, + raise_on_progress=False, + port=port, + ) + except AuthenticationError: # Error from the update() + return await self.async_step_user_auth_confirm() + except KasaException as ex: + errors["base"] = "cannot_connect" + placeholders["error"] = str(ex) + else: + if not device: + return await self.async_step_user_auth_confirm() + + if self._async_supports_camera_credentials(device): + return await self.async_step_camera_auth_confirm() + + return self._async_create_or_update_entry_from_device(device) + + return self.async_show_form( + step_id="reconfigure", + data_schema=self.add_suggested_values_to_schema( + STEP_RECONFIGURE_DATA_SCHEMA, + {CONF_HOST: f"{host}:{port}" if port else host}, + ), + errors=errors, + description_placeholders={ + **placeholders, + CONF_MAC: reconfigure_entry.unique_id, + }, + ) diff --git a/homeassistant/components/tplink/const.py b/homeassistant/components/tplink/const.py index 28e4b04bcf9..61c1bf1cb9b 100644 --- a/homeassistant/components/tplink/const.py +++ b/homeassistant/components/tplink/const.py @@ -24,12 +24,15 @@ CONF_CREDENTIALS_HASH: Final = "credentials_hash" CONF_CONNECTION_PARAMETERS: Final = "connection_parameters" CONF_USES_HTTP: Final = "uses_http" CONF_AES_KEYS: Final = "aes_keys" +CONF_CAMERA_CREDENTIALS = "camera_credentials" +CONF_LIVE_VIEW = "live_view" CONF_CONFIG_ENTRY_MINOR_VERSION: Final = 5 PLATFORMS: Final = [ Platform.BINARY_SENSOR, Platform.BUTTON, + Platform.CAMERA, Platform.CLIMATE, Platform.FAN, Platform.LIGHT, diff --git a/homeassistant/components/tplink/entity.py b/homeassistant/components/tplink/entity.py index ef9e2ad5eee..60d066012a2 100644 --- a/homeassistant/components/tplink/entity.py +++ 
b/homeassistant/components/tplink/entity.py @@ -73,6 +73,13 @@ EXCLUDED_FEATURES = { "check_latest_firmware", # siren "alarm", + # camera + "pan_left", + "pan_right", + "pan_step", + "tilt_up", + "tilt_down", + "tilt_step", } @@ -91,6 +98,13 @@ class TPLinkFeatureEntityDescription(EntityDescription): deprecated_info: DeprecatedInfo | None = None +@dataclass(frozen=True, kw_only=True) +class TPLinkModuleEntityDescription(EntityDescription): + """Base class for a TPLink module based entity description.""" + + deprecated_info: DeprecatedInfo | None = None + + def async_refresh_after[_T: CoordinatedTPLinkEntity, **_P]( func: Callable[Concatenate[_T, _P], Awaitable[None]], ) -> Callable[Concatenate[_T, _P], Coroutine[Any, Any, None]]: diff --git a/homeassistant/components/tplink/manifest.json b/homeassistant/components/tplink/manifest.json index 65061882027..7797f0a36a3 100644 --- a/homeassistant/components/tplink/manifest.json +++ b/homeassistant/components/tplink/manifest.json @@ -3,7 +3,7 @@ "name": "TP-Link Smart Home", "codeowners": ["@rytilahti", "@bdraco", "@sdb9696"], "config_flow": true, - "dependencies": ["network"], + "dependencies": ["network", "ffmpeg", "stream"], "dhcp": [ { "registered_devices": true diff --git a/homeassistant/components/tplink/models.py b/homeassistant/components/tplink/models.py index ced58d3d21f..389260a388b 100644 --- a/homeassistant/components/tplink/models.py +++ b/homeassistant/components/tplink/models.py @@ -4,6 +4,8 @@ from __future__ import annotations from dataclasses import dataclass +from kasa import Credentials + from .coordinator import TPLinkDataUpdateCoordinator @@ -13,3 +15,5 @@ class TPLinkData: parent_coordinator: TPLinkDataUpdateCoordinator children_coordinators: list[TPLinkDataUpdateCoordinator] + camera_credentials: Credentials | None + live_view: bool | None diff --git a/homeassistant/components/tplink/strings.json b/homeassistant/components/tplink/strings.json index 8e5118c2720..7443636c3c0 100644 --- 
a/homeassistant/components/tplink/strings.json +++ b/homeassistant/components/tplink/strings.json @@ -42,16 +42,36 @@ "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" } + }, + "reconfigure": { + "title": "Reconfigure TPLink entry", + "description": "Update your configuration for device {mac}", + "data": { + "host": "[%key:common::config_flow::data::host%]" + } + }, + "camera_auth_confirm": { + "title": "Set camera account credentials", + "description": "Input device camera account credentials. Leave blank if they are the same as your TPLink cloud credentials.", + "data": { + "live_view": "Enable camera live view", + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } } }, "error": { "cannot_connect": "Connection error: {error}", - "invalid_auth": "Invalid authentication: {error}" + "invalid_auth": "Unable to authenticate: {error}", + "invalid_camera_auth": "Camera stream authentication failed", + "cannot_connect_camera": "Unable to access the camera stream, verify that you have set up the camera account: {error}", + "camera_creds": "You have to set both username and password" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } }, @@ -102,6 +122,11 @@ "name": "Stop alarm" } }, + "camera": { + "live_view": { + "name": "Live view" + } + }, "select": { "light_preset": { "name": "Light preset" diff --git a/tests/components/tplink/__init__.py b/tests/components/tplink/__init__.py index fdef5c35bfa..e322cf9f5de 100644 --- a/tests/components/tplink/__init__.py +++ 
b/tests/components/tplink/__init__.py @@ -1,6 +1,7 @@ """Tests for the TP-Link component.""" from collections import namedtuple +from dataclasses import replace from datetime import datetime from typing import Any from unittest.mock import AsyncMock, MagicMock, patch @@ -19,15 +20,18 @@ from kasa import ( ) from kasa.interfaces import Fan, Light, LightEffect, LightState from kasa.smart.modules.alarm import Alarm +from kasa.smartcam.modules.camera import LOCAL_STREAMING_PORT, Camera from syrupy import SnapshotAssertion from homeassistant.components.automation import DOMAIN as AUTOMATION_DOMAIN from homeassistant.components.tplink import ( CONF_AES_KEYS, CONF_ALIAS, + CONF_CAMERA_CREDENTIALS, CONF_CONNECTION_PARAMETERS, CONF_CREDENTIALS_HASH, CONF_HOST, + CONF_LIVE_VIEW, CONF_MODEL, CONF_USES_HTTP, Credentials, @@ -49,14 +53,19 @@ MODULE = "homeassistant.components.tplink" MODULE_CONFIG_FLOW = "homeassistant.components.tplink.config_flow" IP_ADDRESS = "127.0.0.1" IP_ADDRESS2 = "127.0.0.2" +IP_ADDRESS3 = "127.0.0.3" ALIAS = "My Bulb" +ALIAS_CAMERA = "My Camera" MODEL = "HS100" +MODEL_CAMERA = "C210" MAC_ADDRESS = "aa:bb:cc:dd:ee:ff" DEVICE_ID = "123456789ABCDEFGH" DEVICE_ID_MAC = "AA:BB:CC:DD:EE:FF" DHCP_FORMATTED_MAC_ADDRESS = MAC_ADDRESS.replace(":", "") MAC_ADDRESS2 = "11:22:33:44:55:66" +MAC_ADDRESS3 = "66:55:44:33:22:11" DEFAULT_ENTRY_TITLE = f"{ALIAS} {MODEL}" +DEFAULT_ENTRY_TITLE_CAMERA = f"{ALIAS_CAMERA} {MODEL_CAMERA}" CREDENTIALS_HASH_LEGACY = "" CONN_PARAMS_LEGACY = DeviceConnectionParameters( DeviceFamily.IotSmartPlugSwitch, DeviceEncryptionType.Xor @@ -80,7 +89,26 @@ DEVICE_CONFIG_KLAP = DeviceConfig( CONN_PARAMS_AES = DeviceConnectionParameters( DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Aes ) -AES_KEYS = {"private": "foo", "public": "bar"} +_test_privkey = ( + "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAKLJKmBWGj6WYo9sewI8vkqar" + "Ed5H1JUr8Jj/LEWLTtV6+Mm4mfyEk6YKFHSmIG4AGgrVsGK/EbEkTZk9CwtixNQpBVc36oN2R" + 
"vuWWV38YnP4vI63mNxTA/gQonCsahjN4HfwE87pM7O5z39aeunoYm6Be663t33DbJH1ZUbZjm" + "tAgMBAAECgYB1Bn1KaFvRprcQOIJt51E9vNghQbf8rhj0fIEKpdC6mVhNIoUdCO+URNqnh+hP" + "SQIx4QYreUlHbsSeABFxOQSDJm6/kqyQsp59nCVDo/bXTtlvcSJ/sU3riqJNxYqEU1iJ0xMvU" + "N1VKKTmik89J8e5sN9R0AFfUSJIk7MpdOoD2QJBANTbV27nenyvbqee/ul4frdt2rrPGcGpcV" + "QmY87qbbrZgqgL5LMHHD7T/v/I8D1wRog1sBz/AiZGcnv/ox8dHKsCQQDDx8DCGPySSVqKVua" + "yUkBNpglN83wiCXZjyEtWIt+aB1A2n5ektE/o8oHnnOuvMdooxvtid7Mdapi2VLHV7VMHAkAE" + "d0GjWwnv2cJpk+VnQpbuBEkFiFjS/loZWODZM4Pv2qZqHi3DL9AA5XPBLBcWQufH7dBvG06RP" + "QMj5N4oRfUXAkEAuJJkVliqHNvM4OkGewzyFII4+WVYHNqg43dcFuuvtA27AJQ6qYtYXrvp3k" + "phI3yzOIhHTNCea1goepSkR5ODFwJBAJCTRbB+P47aEr/xA51ZFHE6VefDBJG9yg6yK4jcOxg" + "5ficXEpx8442okNtlzwa+QHpm/L3JOFrHwiEeVqXtiqY=" +) +_test_pubkey = ( + "MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCiySpgVho+lmKPbHsCPL5KmqxHeR9SVK/CY" + "/yxFi07VevjJuJn8hJOmChR0piBuABoK1bBivxGxJE2ZPQsLYsTUKQVXN+qDdkb7llld/GJz+" + "LyOt5jcUwP4EKJwrGoYzeB38BPO6TOzuc9/Wnrp6GJugXuut7d9w2yR9WVG2Y5rQIDAQAB" +) +AES_KEYS = {"private": _test_privkey, "public": _test_pubkey} DEVICE_CONFIG_AES = DeviceConfig( IP_ADDRESS2, credentials=CREDENTIALS, @@ -88,6 +116,16 @@ DEVICE_CONFIG_AES = DeviceConfig( uses_http=True, aes_keys=AES_KEYS, ) +CONN_PARAMS_AES_CAMERA = DeviceConnectionParameters( + DeviceFamily.SmartIpCamera, DeviceEncryptionType.Aes, https=True, login_version=2 +) +DEVICE_CONFIG_AES_CAMERA = DeviceConfig( + IP_ADDRESS3, + credentials=CREDENTIALS, + connection_type=CONN_PARAMS_AES_CAMERA, + uses_http=True, +) + DEVICE_CONFIG_DICT_KLAP = { k: v for k, v in DEVICE_CONFIG_KLAP.to_dict().items() if k != "credentials" } @@ -119,6 +157,22 @@ CREATE_ENTRY_DATA_AES = { CONF_USES_HTTP: True, CONF_AES_KEYS: AES_KEYS, } +CREATE_ENTRY_DATA_AES_CAMERA = { + CONF_HOST: IP_ADDRESS3, + CONF_ALIAS: ALIAS_CAMERA, + CONF_MODEL: MODEL_CAMERA, + CONF_CREDENTIALS_HASH: CREDENTIALS_HASH_AES, + CONF_CONNECTION_PARAMETERS: CONN_PARAMS_AES_CAMERA.to_dict(), + CONF_USES_HTTP: True, + CONF_LIVE_VIEW: True, + 
CONF_CAMERA_CREDENTIALS: {"username": "camuser", "password": "campass"}, +} +SMALLEST_VALID_JPEG = ( + "ffd8ffe000104a46494600010101004800480000ffdb00430003020202020203020202030303030406040404040408060" + "6050609080a0a090809090a0c0f0c0a0b0e0b09090d110d0e0f101011100a0c12131210130f101010ffc9000b08000100" + "0101011100ffcc000600101005ffda0008010100003f00d2cf20ffd9" +) +SMALLEST_VALID_JPEG_BYTES = bytes.fromhex(SMALLEST_VALID_JPEG) def _load_feature_fixtures(): @@ -245,6 +299,9 @@ def _mocked_device( device.modules = {} device.features = {} + # replace device_config to prevent changes affecting between tests + device_config = replace(device_config) + if not ip_address: ip_address = IP_ADDRESS else: @@ -429,6 +486,17 @@ def _mocked_alarm_module(device): return alarm +def _mocked_camera_module(device): + camera = MagicMock(auto_spec=Camera, name="Mocked camera") + camera.is_on = True + camera.set_state = AsyncMock() + camera.stream_rtsp_url.return_value = ( + f"rtsp://user:pass@{device.host}:{LOCAL_STREAMING_PORT}/stream1" + ) + + return camera + + def _mocked_strip_children(features=None, alias=None) -> list[Device]: plug0 = _mocked_device( alias="Plug0" if alias is None else alias, @@ -496,6 +564,7 @@ MODULE_TO_MOCK_GEN = { Module.LightEffect: _mocked_light_effect_module, Module.Fan: _mocked_fan_module, Module.Alarm: _mocked_alarm_module, + Module.Camera: _mocked_camera_module, } diff --git a/tests/components/tplink/conftest.py b/tests/components/tplink/conftest.py index 25a4bd20270..f1bbb80b80c 100644 --- a/tests/components/tplink/conftest.py +++ b/tests/components/tplink/conftest.py @@ -1,30 +1,73 @@ """tplink conftest.""" from collections.abc import Generator +from contextlib import contextmanager from unittest.mock import DEFAULT, AsyncMock, patch -from kasa import DeviceConfig +from kasa import DeviceConfig, Module import pytest from homeassistant.components.tplink import DOMAIN from homeassistant.core import HomeAssistant from . 
import ( + ALIAS_CAMERA, + CREATE_ENTRY_DATA_AES_CAMERA, CREATE_ENTRY_DATA_LEGACY, CREDENTIALS_HASH_AES, CREDENTIALS_HASH_KLAP, DEVICE_CONFIG_AES, + DEVICE_CONFIG_AES_CAMERA, DEVICE_CONFIG_KLAP, IP_ADDRESS, IP_ADDRESS2, + IP_ADDRESS3, MAC_ADDRESS, MAC_ADDRESS2, + MAC_ADDRESS3, + MODEL_CAMERA, _mocked_device, ) from tests.common import MockConfigEntry +@contextmanager +def override_side_effect(mock: AsyncMock, effect): + """Temporarily override a mock side effect and replace afterwards.""" + try: + default_side_effect = mock.side_effect + mock.side_effect = effect + yield mock + finally: + mock.side_effect = default_side_effect + + +def _get_mock_devices(): + return { + IP_ADDRESS: _mocked_device( + device_config=DeviceConfig.from_dict(DEVICE_CONFIG_KLAP.to_dict()), + credentials_hash=CREDENTIALS_HASH_KLAP, + ip_address=IP_ADDRESS, + ), + IP_ADDRESS2: _mocked_device( + device_config=DeviceConfig.from_dict(DEVICE_CONFIG_AES.to_dict()), + credentials_hash=CREDENTIALS_HASH_AES, + mac=MAC_ADDRESS2, + ip_address=IP_ADDRESS2, + ), + IP_ADDRESS3: _mocked_device( + device_config=DeviceConfig.from_dict(DEVICE_CONFIG_AES_CAMERA.to_dict()), + credentials_hash=CREDENTIALS_HASH_AES, + mac=MAC_ADDRESS3, + ip_address=IP_ADDRESS3, + modules=[Module.Camera], + alias=ALIAS_CAMERA, + model=MODEL_CAMERA, + ), + } + + @pytest.fixture def mock_discovery(): """Mock python-kasa discovery.""" @@ -34,22 +77,15 @@ def mock_discovery(): discover_single=DEFAULT, try_connect_all=DEFAULT, ) as mock_discovery: - device = _mocked_device( - device_config=DeviceConfig.from_dict(DEVICE_CONFIG_KLAP.to_dict()), - credentials_hash=CREDENTIALS_HASH_KLAP, - alias="My Bulb", - ) - devices = { - "127.0.0.1": _mocked_device( - device_config=DeviceConfig.from_dict(DEVICE_CONFIG_KLAP.to_dict()), - credentials_hash=CREDENTIALS_HASH_KLAP, - alias=None, - ) - } + devices = _get_mock_devices() + + def get_device(host, **kwargs): + return devices[host] + mock_discovery["discover"].return_value = devices - 
mock_discovery["discover_single"].return_value = device - mock_discovery["try_connect_all"].return_value = device - mock_discovery["mock_device"] = device + mock_discovery["discover_single"].side_effect = get_device + mock_discovery["try_connect_all"].side_effect = get_device + mock_discovery["mock_devices"] = devices yield mock_discovery @@ -57,22 +93,9 @@ def mock_discovery(): def mock_connect(): """Mock python-kasa connect.""" with patch("homeassistant.components.tplink.Device.connect") as mock_connect: - devices = { - IP_ADDRESS: _mocked_device( - device_config=DeviceConfig.from_dict(DEVICE_CONFIG_KLAP.to_dict()), - credentials_hash=CREDENTIALS_HASH_KLAP, - ip_address=IP_ADDRESS, - ), - IP_ADDRESS2: _mocked_device( - device_config=DeviceConfig.from_dict(DEVICE_CONFIG_AES.to_dict()), - credentials_hash=CREDENTIALS_HASH_AES, - mac=MAC_ADDRESS2, - ip_address=IP_ADDRESS2, - ), - } + devices = _get_mock_devices() def get_device(config): - nonlocal devices return devices[config.host] mock_connect.side_effect = get_device @@ -117,6 +140,17 @@ def mock_config_entry() -> MockConfigEntry: ) +@pytest.fixture +def mock_camera_config_entry() -> MockConfigEntry: + """Mock camera ConfigEntry.""" + return MockConfigEntry( + title="TPLink", + domain=DOMAIN, + data={**CREATE_ENTRY_DATA_AES_CAMERA}, + unique_id=MAC_ADDRESS3, + ) + + @pytest.fixture async def mock_added_config_entry( hass: HomeAssistant, diff --git a/tests/components/tplink/fixtures/features.json b/tests/components/tplink/fixtures/features.json index d822bfc9b57..a54edf56c62 100644 --- a/tests/components/tplink/fixtures/features.json +++ b/tests/components/tplink/fixtures/features.json @@ -320,5 +320,35 @@ "type": "Sensor", "category": "Info", "value": "2024-06-24 10:03:11.046643+01:00" + }, + "pan_left": { + "value": "", + "type": "Action", + "category": "Config" + }, + "pan_right": { + "value": "", + "type": "Action", + "category": "Config" + }, + "pan_step": { + "value": 10, + "type": "Number", + "category": 
"Config" + }, + "tilt_up": { + "value": "", + "type": "Action", + "category": "Config" + }, + "tilt_down": { + "value": "", + "type": "Action", + "category": "Config" + }, + "tilt_step": { + "value": 10, + "type": "Number", + "category": "Config" } } diff --git a/tests/components/tplink/snapshots/test_camera.ambr b/tests/components/tplink/snapshots/test_camera.ambr new file mode 100644 index 00000000000..4ce1813d704 --- /dev/null +++ b/tests/components/tplink/snapshots/test_camera.ambr @@ -0,0 +1,87 @@ +# serializer version: 1 +# name: test_states[camera.my_camera_live_view-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'camera', + 'entity_category': None, + 'entity_id': 'camera.my_camera_live_view', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Live view', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'live_view', + 'unique_id': "123456789ABCDEFGH-TPLinkCameraEntityDescription(key='live_view', device_class=None, entity_category=None, entity_registry_enabled_default=True, entity_registry_visible_default=True, force_update=False, icon=None, has_entity_name=False, name=, translation_key='live_view', translation_placeholders=None, unit_of_measurement=None, deprecated_info=None)", + 'unit_of_measurement': None, + }) +# --- +# name: test_states[camera.my_camera_live_view-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'access_token': '1caab5c3b3', + 'entity_picture': '/api/camera_proxy/camera.my_camera_live_view?token=1caab5c3b3', + 'friendly_name': 'my_camera Live view', + 'frontend_stream_type': , + 'supported_features': , + }), + 'context': , + 'entity_id': 
'camera.my_camera_live_view', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_states[my_camera-entry] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'mac', + '66:55:44:33:22:11', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '1.0.0', + 'id': , + 'identifiers': set({ + tuple( + 'tplink', + '123456789ABCDEFGH', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'TP-Link', + 'model': 'HS100', + 'model_id': None, + 'name': 'my_camera', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '1.0.0', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/tplink/test_camera.py b/tests/components/tplink/test_camera.py new file mode 100644 index 00000000000..d8b0f82e32a --- /dev/null +++ b/tests/components/tplink/test_camera.py @@ -0,0 +1,431 @@ +"""The tests for the tplink camera platform.""" + +import asyncio +from unittest.mock import AsyncMock, patch + +from aiohttp.test_utils import make_mocked_request +from freezegun.api import FrozenDateTimeFactory +from kasa import Module +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components import stream +from homeassistant.components.camera import ( + CameraEntityFeature, + StreamType, + async_get_image, + async_get_mjpeg_stream, + get_camera_from_entity_id, +) +from homeassistant.components.tplink.camera import TPLinkCameraEntity +from homeassistant.components.websocket_api import TYPE_RESULT +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant, HomeAssistantError +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from . 
import ( + IP_ADDRESS3, + MAC_ADDRESS3, + SMALLEST_VALID_JPEG_BYTES, + _mocked_device, + setup_platform_for_device, + snapshot_platform, +) + +from tests.common import MockConfigEntry, async_fire_time_changed +from tests.typing import WebSocketGenerator + + +async def test_states( + hass: HomeAssistant, + mock_camera_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test states.""" + mock_camera_config_entry.add_to_hass(hass) + + mock_device = _mocked_device( + modules=[Module.Camera], + alias="my_camera", + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + ) + + # Patch getrandbits so the access_token doesn't change on camera attributes + with patch("random.SystemRandom.getrandbits", return_value=123123123123): + await setup_platform_for_device( + hass, mock_camera_config_entry, Platform.CAMERA, mock_device + ) + + await snapshot_platform( + hass, + entity_registry, + device_registry, + snapshot, + mock_camera_config_entry.entry_id, + ) + + +async def test_handle_mjpeg_stream( + hass: HomeAssistant, + mock_camera_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test handle_async_mjpeg_stream.""" + mock_device = _mocked_device( + modules=[Module.Camera], + alias="my_camera", + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + ) + + await setup_platform_for_device( + hass, mock_camera_config_entry, Platform.CAMERA, mock_device + ) + + state = hass.states.get("camera.my_camera_live_view") + assert state is not None + + mock_request = make_mocked_request("GET", "/", headers={"token": "x"}) + stream = await async_get_mjpeg_stream( + hass, mock_request, "camera.my_camera_live_view" + ) + assert stream is not None + + +async def test_handle_mjpeg_stream_not_supported( + hass: HomeAssistant, + mock_camera_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test handle_async_mjpeg_stream.""" + mock_device = 
_mocked_device( + modules=[Module.Camera], + alias="my_camera", + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + ) + mock_camera = mock_device.modules[Module.Camera] + + mock_camera.stream_rtsp_url.return_value = None + + await setup_platform_for_device( + hass, mock_camera_config_entry, Platform.CAMERA, mock_device + ) + + mock_request = make_mocked_request("GET", "/", headers={"token": "x"}) + stream = await async_get_mjpeg_stream( + hass, mock_request, "camera.my_camera_live_view" + ) + assert stream is None + + +async def test_camera_image( + hass: HomeAssistant, + mock_camera_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test async_get_image.""" + mock_device = _mocked_device( + modules=[Module.Camera], + alias="my_camera", + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + ) + + await setup_platform_for_device( + hass, mock_camera_config_entry, Platform.CAMERA, mock_device + ) + + state = hass.states.get("camera.my_camera_live_view") + assert state is not None + + with patch( + "homeassistant.components.ffmpeg.async_get_image", + return_value=SMALLEST_VALID_JPEG_BYTES, + ) as mock_get_image: + image = await async_get_image(hass, "camera.my_camera_live_view") + assert image + assert image.content == SMALLEST_VALID_JPEG_BYTES + mock_get_image.assert_called_once() + + mock_get_image.reset_mock() + image = await async_get_image(hass, "camera.my_camera_live_view") + mock_get_image.assert_not_called() + + freezer.tick(TPLinkCameraEntity.IMAGE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + mock_get_image.reset_mock() + image = await async_get_image(hass, "camera.my_camera_live_view") + mock_get_image.assert_called_once() + + freezer.tick(TPLinkCameraEntity.IMAGE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + # Test image returns None + with patch( + 
"homeassistant.components.ffmpeg.async_get_image", + return_value=None, + ) as mock_get_image: + msg = f"None camera image returned for {IP_ADDRESS3}" + assert msg not in caplog.text + + mock_get_image.reset_mock() + image = await async_get_image(hass, "camera.my_camera_live_view") + mock_get_image.assert_called_once() + + assert msg in caplog.text + + +async def test_no_camera_image_when_streaming( + hass: HomeAssistant, + mock_camera_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test async_get_image.""" + mock_device = _mocked_device( + modules=[Module.Camera], + alias="my_camera", + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + ) + + await setup_platform_for_device( + hass, mock_camera_config_entry, Platform.CAMERA, mock_device + ) + + state = hass.states.get("camera.my_camera_live_view") + assert state is not None + + with patch( + "homeassistant.components.ffmpeg.async_get_image", + return_value=SMALLEST_VALID_JPEG_BYTES, + ) as mock_get_image: + await async_get_image(hass, "camera.my_camera_live_view") + mock_get_image.assert_called_once() + + freezer.tick(TPLinkCameraEntity.IMAGE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + start_event = asyncio.Event() + finish_event = asyncio.Event() + + async def _waiter(*_, **__): + start_event.set() + await finish_event.wait() + + async def _get_stream(): + mock_request = make_mocked_request("GET", "/", headers={"token": "x"}) + await async_get_mjpeg_stream( + hass, mock_request, "camera.my_camera_live_view" + ) + + mock_get_image.reset_mock() + with patch( + "homeassistant.components.tplink.camera.async_aiohttp_proxy_stream", + new=_waiter, + ): + task = asyncio.create_task(_get_stream()) + await start_event.wait() + await async_get_image(hass, "camera.my_camera_live_view") + finish_event.set() + await task + + mock_get_image.assert_not_called() + + +async def test_no_concurrent_camera_image( + hass: HomeAssistant, + 
mock_camera_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test async_get_image.""" + mock_device = _mocked_device( + modules=[Module.Camera], + alias="my_camera", + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + ) + + await setup_platform_for_device( + hass, mock_camera_config_entry, Platform.CAMERA, mock_device + ) + + state = hass.states.get("camera.my_camera_live_view") + assert state is not None + + finish_event = asyncio.Event() + call_count = 0 + + async def _waiter(*_, **__): + nonlocal call_count + call_count += 1 + await finish_event.wait() + return SMALLEST_VALID_JPEG_BYTES + + with patch( + "homeassistant.components.ffmpeg.async_get_image", + new=_waiter, + ): + tasks = asyncio.gather( + async_get_image(hass, "camera.my_camera_live_view"), + async_get_image(hass, "camera.my_camera_live_view"), + ) + # Sleep to give both tasks chance to get to th asyncio.Lock() + await asyncio.sleep(0) + finish_event.set() + results = await tasks + assert len(results) == 2 + assert all(img and img.content == SMALLEST_VALID_JPEG_BYTES for img in results) + assert call_count == 1 + + +async def test_camera_image_auth_error( + hass: HomeAssistant, + mock_camera_config_entry: MockConfigEntry, + mock_connect: AsyncMock, + mock_discovery: AsyncMock, +) -> None: + """Test async_get_image.""" + mock_device = _mocked_device( + modules=[Module.Camera], + alias="my_camera", + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + ) + + await setup_platform_for_device( + hass, mock_camera_config_entry, Platform.CAMERA, mock_device + ) + + state = hass.states.get("camera.my_camera_live_view") + assert state is not None + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 0 + + with ( + patch( + "homeassistant.components.ffmpeg.async_get_image", + return_value=b"", + ), + patch( + "homeassistant.components.stream.async_check_stream_client_error", + side_effect=stream.StreamOpenClientError( + 
stream_client_error=stream.StreamClientError.Unauthorized + ), + ), + pytest.raises(HomeAssistantError), + ): + await async_get_image(hass, "camera.my_camera_live_view") + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + [result] = flows + + assert result["step_id"] == "camera_auth_confirm" + + +async def test_camera_stream_source( + hass: HomeAssistant, + mock_camera_config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test async_get_image. + + This test would fail if the integration didn't properly + put stream in the dependencies. + """ + mock_device = _mocked_device( + modules=[Module.Camera], + alias="my_camera", + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + ) + + await setup_platform_for_device( + hass, mock_camera_config_entry, Platform.CAMERA, mock_device + ) + + state = hass.states.get("camera.my_camera_live_view") + assert state is not None + + client = await hass_ws_client(hass) + await client.send_json_auto_id( + {"type": "camera/stream", "entity_id": "camera.my_camera_live_view"} + ) + msg = await client.receive_json() + + # Assert WebSocket response + assert msg["type"] == TYPE_RESULT + assert msg["success"] + assert "url" in msg["result"] + + +async def test_camera_stream_attributes( + hass: HomeAssistant, + mock_camera_config_entry: MockConfigEntry, +) -> None: + """Test stream attributes.""" + mock_device = _mocked_device( + modules=[Module.Camera], + alias="my_camera", + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + ) + + await setup_platform_for_device( + hass, mock_camera_config_entry, Platform.CAMERA, mock_device + ) + + state = hass.states.get("camera.my_camera_live_view") + assert state is not None + + supported_features = state.attributes.get("supported_features") + assert supported_features is CameraEntityFeature.STREAM | CameraEntityFeature.ON_OFF + camera = get_camera_from_entity_id(hass, "camera.my_camera_live_view") + assert 
camera.camera_capabilities.frontend_stream_types == {StreamType.HLS} + + +async def test_camera_turn_on_off( + hass: HomeAssistant, + mock_camera_config_entry: MockConfigEntry, +) -> None: + """Test camera turn on and off.""" + mock_device = _mocked_device( + modules=[Module.Camera], + alias="my_camera", + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + ) + mock_camera = mock_device.modules[Module.Camera] + + await setup_platform_for_device( + hass, mock_camera_config_entry, Platform.CAMERA, mock_device + ) + + state = hass.states.get("camera.my_camera_live_view") + assert state is not None + + await hass.services.async_call( + "camera", + "turn_on", + {"entity_id": "camera.my_camera_live_view"}, + blocking=True, + ) + mock_camera.set_state.assert_called_with(True) + + await hass.services.async_call( + "camera", + "turn_off", + {"entity_id": "camera.my_camera_live_view"}, + blocking=True, + ) + mock_camera.set_state.assert_called_with(False) diff --git a/tests/components/tplink/test_config_flow.py b/tests/components/tplink/test_config_flow.py index 2697696c667..980fd0a3f51 100644 --- a/tests/components/tplink/test_config_flow.py +++ b/tests/components/tplink/test_config_flow.py @@ -1,14 +1,13 @@ """Test the tplink config flow.""" -from contextlib import contextmanager import logging from unittest.mock import ANY, AsyncMock, patch -from kasa import TimeoutError +from kasa import Module, TimeoutError import pytest from homeassistant import config_entries -from homeassistant.components import dhcp +from homeassistant.components import dhcp, stream from homeassistant.components.tplink import ( DOMAIN, AuthenticationError, @@ -19,9 +18,11 @@ from homeassistant.components.tplink import ( ) from homeassistant.components.tplink.config_flow import TPLinkConfigFlow from homeassistant.components.tplink.const import ( + CONF_CAMERA_CREDENTIALS, CONF_CONNECTION_PARAMETERS, CONF_CREDENTIALS_HASH, CONF_DEVICE_CONFIG, + CONF_LIVE_VIEW, ) from homeassistant.config_entries import 
SOURCE_REAUTH, ConfigEntryState from homeassistant.const import ( @@ -39,44 +40,43 @@ from homeassistant.data_entry_flow import FlowResultType from . import ( AES_KEYS, ALIAS, + ALIAS_CAMERA, CONN_PARAMS_AES, CONN_PARAMS_KLAP, CONN_PARAMS_LEGACY, CREATE_ENTRY_DATA_AES, + CREATE_ENTRY_DATA_AES_CAMERA, CREATE_ENTRY_DATA_KLAP, CREATE_ENTRY_DATA_LEGACY, CREDENTIALS_HASH_AES, CREDENTIALS_HASH_KLAP, DEFAULT_ENTRY_TITLE, + DEFAULT_ENTRY_TITLE_CAMERA, DEVICE_CONFIG_AES, + DEVICE_CONFIG_AES_CAMERA, DEVICE_CONFIG_DICT_KLAP, DEVICE_CONFIG_KLAP, DEVICE_CONFIG_LEGACY, DHCP_FORMATTED_MAC_ADDRESS, IP_ADDRESS, + IP_ADDRESS2, + IP_ADDRESS3, MAC_ADDRESS, MAC_ADDRESS2, + MAC_ADDRESS3, + MODEL_CAMERA, MODULE, + SMALLEST_VALID_JPEG_BYTES, _mocked_device, _patch_connect, _patch_discovery, _patch_single_discovery, ) +from .conftest import override_side_effect from tests.common import MockConfigEntry -@contextmanager -def override_side_effect(mock: AsyncMock, effect): - """Temporarily override a mock side effect and replace afterwards.""" - try: - default_side_effect = mock.side_effect - mock.side_effect = effect - yield mock - finally: - mock.side_effect = default_side_effect - - @pytest.mark.parametrize( ("device_config", "expected_entry_data", "credentials_hash"), [ @@ -98,6 +98,7 @@ async def test_discovery( device_config=device_config, credentials_hash=credentials_hash, ip_address=ip_address, + mac=MAC_ADDRESS, ) with ( _patch_discovery(device, ip_address=ip_address), @@ -143,7 +144,7 @@ async def test_discovery( result["flow_id"], {CONF_DEVICE: MAC_ADDRESS}, ) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == DEFAULT_ENTRY_TITLE @@ -167,13 +168,142 @@ async def test_discovery( assert result2["reason"] == "no_devices_found" +async def test_discovery_camera( + hass: HomeAssistant, mock_discovery: AsyncMock, mock_connect: AsyncMock, mock_init +) -> None: + 
"""Test authenticated discovery for camera with stream.""" + mock_device = _mocked_device( + alias=ALIAS_CAMERA, + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + model=MODEL_CAMERA, + device_config=DEVICE_CONFIG_AES_CAMERA, + credentials_hash=CREDENTIALS_HASH_AES, + modules=[Module.Camera], + ) + + with override_side_effect(mock_connect["connect"], lambda *_, **__: mock_device): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: IP_ADDRESS3, + CONF_MAC: MAC_ADDRESS3, + CONF_ALIAS: ALIAS, + CONF_DEVICE: mock_device, + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "discovery_confirm" + assert not result["errors"] + + with override_side_effect(mock_connect["connect"], lambda *_, **__: mock_device): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + assert not result["errors"] + + with patch( + "homeassistant.components.stream.async_check_stream_client_error", + return_value=None, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_ENTRY_TITLE_CAMERA + assert result["data"] == CREATE_ENTRY_DATA_AES_CAMERA + assert result["context"]["unique_id"] == MAC_ADDRESS3 + + +async def test_discovery_pick_device_camera( + hass: HomeAssistant, mock_discovery: AsyncMock, mock_connect: AsyncMock, mock_init +) -> None: + """Test authenticated discovery for camera with stream.""" + mock_device = _mocked_device( + alias=ALIAS_CAMERA, + ip_address=IP_ADDRESS3, + 
mac=MAC_ADDRESS3, + model=MODEL_CAMERA, + device_config=DEVICE_CONFIG_AES_CAMERA, + credentials_hash=CREDENTIALS_HASH_AES, + modules=[Module.Camera], + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + with override_side_effect( + mock_discovery["discover"], lambda *_, **__: {IP_ADDRESS3: mock_device} + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "pick_device" + assert not result["errors"] + + with override_side_effect(mock_connect["connect"], lambda *_, **__: mock_device): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_DEVICE: MAC_ADDRESS3}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + assert not result["errors"] + + with patch( + "homeassistant.components.stream.async_check_stream_client_error", + return_value=None, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_ENTRY_TITLE_CAMERA + assert result["data"] == CREATE_ENTRY_DATA_AES_CAMERA + assert result["context"]["unique_id"] == MAC_ADDRESS3 + + async def test_discovery_auth( hass: HomeAssistant, mock_discovery: AsyncMock, mock_connect: AsyncMock, mock_init ) -> None: """Test authenticated discovery.""" - - mock_device = mock_connect["mock_devices"][IP_ADDRESS] - assert mock_device.config == 
DEVICE_CONFIG_KLAP + mock_device = _mocked_device( + alias=ALIAS, + ip_address=IP_ADDRESS, + mac=MAC_ADDRESS, + device_config=DEVICE_CONFIG_KLAP, + credentials_hash=CREDENTIALS_HASH_KLAP, + ) with override_side_effect(mock_connect["connect"], AuthenticationError): result = await hass.config_entries.flow.async_init( @@ -191,13 +321,14 @@ async def test_discovery_auth( assert result["step_id"] == "discovery_auth_confirm" assert not result["errors"] - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_USERNAME: "fake_username", - CONF_PASSWORD: "fake_password", - }, - ) + with override_side_effect(mock_connect["connect"], lambda *_, **__: mock_device): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == DEFAULT_ENTRY_TITLE @@ -205,6 +336,69 @@ async def test_discovery_auth( assert result2["context"]["unique_id"] == MAC_ADDRESS +async def test_discovery_auth_camera( + hass: HomeAssistant, mock_discovery: AsyncMock, mock_connect: AsyncMock, mock_init +) -> None: + """Test authenticated discovery for camera with stream.""" + mock_device = _mocked_device( + alias=ALIAS_CAMERA, + ip_address=IP_ADDRESS3, + mac=MAC_ADDRESS3, + model=MODEL_CAMERA, + device_config=DEVICE_CONFIG_AES_CAMERA, + credentials_hash=CREDENTIALS_HASH_AES, + modules=[Module.Camera], + ) + + with override_side_effect(mock_connect["connect"], AuthenticationError): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_HOST: IP_ADDRESS3, + CONF_MAC: MAC_ADDRESS3, + CONF_ALIAS: ALIAS, + CONF_DEVICE: mock_device, + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "discovery_auth_confirm" + assert not 
result["errors"] + + with override_side_effect(mock_connect["connect"], lambda *_, **__: mock_device): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + assert not result["errors"] + + with patch( + "homeassistant.components.stream.async_check_stream_client_error", + return_value=None, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_ENTRY_TITLE_CAMERA + assert result["data"] == CREATE_ENTRY_DATA_AES_CAMERA + assert result["context"]["unique_id"] == MAC_ADDRESS3 + + @pytest.mark.parametrize( ("error_type", "errors_msg", "error_placement"), [ @@ -385,7 +579,7 @@ async def test_discovery_new_credentials_invalid( async def test_discovery_with_existing_device_present(hass: HomeAssistant) -> None: """Test setting up discovery.""" config_entry = MockConfigEntry( - domain=DOMAIN, data={CONF_HOST: "127.0.0.2"}, unique_id="dd:dd:dd:dd:dd:dd" + domain=DOMAIN, data={CONF_HOST: IP_ADDRESS2}, unique_id="dd:dd:dd:dd:dd:dd" ) config_entry.add_to_hass(hass) @@ -535,6 +729,227 @@ async def test_manual(hass: HomeAssistant) -> None: assert result2["reason"] == "already_configured" +async def test_manual_camera( + hass: HomeAssistant, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test manual camera.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + result 
= await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: IP_ADDRESS3} + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + + # Test no username or pass + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + assert result["errors"] == {"base": "camera_creds"} + + # Test unknown error + with ( + patch( + "homeassistant.components.stream.async_check_stream_client_error", + side_effect=stream.StreamOpenClientError( + stream_client_error=stream.StreamClientError.NotFound + ), + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + assert result["errors"] == {"base": "cannot_connect_camera"} + assert "error" in result["description_placeholders"] + + # Test unknown error + with ( + patch( + "homeassistant.components.stream.async_check_stream_client_error", + side_effect=stream.StreamOpenClientError( + stream_client_error=stream.StreamClientError.Unauthorized + ), + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + assert result["errors"] == {"base": "invalid_camera_auth"} + + with patch( + "homeassistant.components.stream.async_check_stream_client_error", + return_value=None, + 
): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"][CONF_CAMERA_CREDENTIALS] == { + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + } + assert result["data"][CONF_LIVE_VIEW] is True + + +@pytest.mark.parametrize( + "exception", + [ + pytest.param( + stream.StreamOpenClientError( + stream_client_error=stream.StreamClientError.NotFound + ), + id="open_client_error", + ), + pytest.param(Exception(), id="other_error"), + ], +) +async def test_manual_camera_no_hls( + hass: HomeAssistant, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, + exception: Exception, +) -> None: + """Test manual camera when hls stream fails but mpeg stream works.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: IP_ADDRESS3} + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + + # Test stream error + with ( + patch( + "homeassistant.components.stream.async_check_stream_client_error", + side_effect=exception, + ), + patch("homeassistant.components.ffmpeg.async_get_image", return_value=None), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + assert result["errors"] == {"base": "cannot_connect_camera"} 
+ assert "error" in result["description_placeholders"] + + # async_get_image will succeed + with ( + patch( + "homeassistant.components.stream.async_check_stream_client_error", + side_effect=exception, + ), + patch( + "homeassistant.components.ffmpeg.async_get_image", + return_value=SMALLEST_VALID_JPEG_BYTES, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"][CONF_CAMERA_CREDENTIALS] == { + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + } + assert result["data"][CONF_LIVE_VIEW] is True + + +async def test_manual_camera_no_live_view( + hass: HomeAssistant, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test manual camera.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: IP_ADDRESS3} + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: False, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert CONF_CAMERA_CREDENTIALS not in result["data"] + assert result["data"][CONF_LIVE_VIEW] is False + + async def test_manual_no_capabilities(hass: HomeAssistant) -> None: """Test manually setup without successful get_capabilities.""" result = await hass.config_entries.flow.async_init( @@ -575,7 +990,7 @@ async def test_manual_auth( assert 
result["step_id"] == "user" assert not result["errors"] - mock_discovery["mock_device"].update.side_effect = AuthenticationError + mock_discovery["mock_devices"][IP_ADDRESS].update.side_effect = AuthenticationError result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_HOST: IP_ADDRESS} @@ -586,7 +1001,7 @@ async def test_manual_auth( assert result2["step_id"] == "user_auth_confirm" assert not result2["errors"] - mock_discovery["mock_device"].update.reset_mock(side_effect=True) + mock_discovery["mock_devices"][IP_ADDRESS].update.reset_mock(side_effect=True) result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], @@ -602,6 +1017,63 @@ async def test_manual_auth( assert result3["context"]["unique_id"] == MAC_ADDRESS +async def test_manual_auth_camera( + hass: HomeAssistant, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test manual camera.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + with override_side_effect( + mock_discovery["mock_devices"][IP_ADDRESS3].update, AuthenticationError + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: IP_ADDRESS3} + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user_auth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "foobar", + CONF_PASSWORD: "foobar", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + + with patch( + "homeassistant.components.stream.async_check_stream_client_error", + return_value=None, + ): + result = await hass.config_entries.flow.async_configure( + 
result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"][CONF_CAMERA_CREDENTIALS] == { + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + } + assert result["data"][CONF_LIVE_VIEW] is True + + @pytest.mark.parametrize( ("error_type", "errors_msg", "error_placement"), [ @@ -627,7 +1099,7 @@ async def test_manual_auth_errors( assert result["step_id"] == "user" assert not result["errors"] - mock_discovery["mock_device"].update.side_effect = AuthenticationError + mock_discovery["mock_devices"][IP_ADDRESS].update.side_effect = AuthenticationError with override_side_effect(mock_connect["connect"], error_type): result2 = await hass.config_entries.flow.async_configure( @@ -682,11 +1154,27 @@ async def test_manual_port_override( port, ) -> None: """Test manually setup.""" - mock_discovery["mock_device"].config.port_override = port - mock_discovery["mock_device"].host = host - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + config = DeviceConfig( + host, + credentials=None, + port_override=port, + uses_http=True, + connection_type=CONN_PARAMS_KLAP, ) + mock_device = _mocked_device( + alias=ALIAS, + ip_address=host, + mac=MAC_ADDRESS, + device_config=config, + credentials_hash=CREDENTIALS_HASH_KLAP, + ) + + with override_side_effect( + mock_discovery["try_connect_all"], lambda *_, **__: mock_device + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert not result["errors"] @@ -696,23 +1184,29 @@ async def test_manual_port_override( mock_discovery["discover_single"].side_effect = TimeoutError mock_connect["connect"].side_effect = AuthenticationError - result2 = await 
hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HOST: host_str} - ) - await hass.async_block_till_done() + with override_side_effect( + mock_discovery["try_connect_all"], lambda *_, **__: mock_device + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: host_str} + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "user_auth_confirm" assert not result2["errors"] creds = Credentials("fake_username", "fake_password") - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - user_input={ - CONF_USERNAME: "fake_username", - CONF_PASSWORD: "fake_password", - }, - ) + with override_side_effect( + mock_discovery["try_connect_all"], lambda *_, **__: mock_device + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) await hass.async_block_till_done() mock_discovery["try_connect_all"].assert_called_once_with( host, credentials=creds, port=port, http_client=ANY @@ -744,7 +1238,7 @@ async def test_manual_port_override_invalid( await hass.async_block_till_done() mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=None, port=None + IP_ADDRESS, credentials=None, port=None ) assert result2["type"] is FlowResultType.CREATE_ENTRY @@ -941,7 +1435,7 @@ async def test_integration_discovery_with_ip_change( mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_LEGACY.to_dict() ) - assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" + assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS mocked_device = _mocked_device(device_config=DEVICE_CONFIG_KLAP) with override_side_effect(mock_connect["connect"], lambda *_, **__: mocked_device): @@ -949,7 +1443,7 @@ async def test_integration_discovery_with_ip_change( DOMAIN, context={"source": 
config_entries.SOURCE_INTEGRATION_DISCOVERY}, data={ - CONF_HOST: "127.0.0.2", + CONF_HOST: IP_ADDRESS2, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, CONF_DEVICE: mocked_device, @@ -961,7 +1455,7 @@ async def test_integration_discovery_with_ip_change( assert ( mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() ) - assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" + assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS2 config = DeviceConfig.from_dict(DEVICE_CONFIG_DICT_KLAP) @@ -984,8 +1478,8 @@ async def test_integration_discovery_with_ip_change( await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.LOADED # Check that init set the new host correctly before calling connect - assert config.host == "127.0.0.1" - config.host = "127.0.0.2" + assert config.host == IP_ADDRESS + config.host = IP_ADDRESS2 config.uses_http = False # Not passed in to new config class config.http_client = "Foo" mock_connect["connect"].assert_awaited_once_with(config=config) @@ -1024,7 +1518,7 @@ async def test_integration_discovery_with_connection_change( ) == 0 ) - assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" + assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS2 assert ( mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_AES.to_dict() ) @@ -1034,7 +1528,7 @@ async def test_integration_discovery_with_connection_change( NEW_DEVICE_CONFIG = { **DEVICE_CONFIG_DICT_KLAP, "connection_type": CONN_PARAMS_KLAP.to_dict(), - CONF_HOST: "127.0.0.2", + CONF_HOST: IP_ADDRESS2, } config = DeviceConfig.from_dict(NEW_DEVICE_CONFIG) # Reset the connect mock so when the config flow reloads the entry it succeeds @@ -1055,7 +1549,7 @@ async def test_integration_discovery_with_connection_change( DOMAIN, context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, data={ - CONF_HOST: "127.0.0.2", + CONF_HOST: IP_ADDRESS2, CONF_MAC: MAC_ADDRESS2, CONF_ALIAS: ALIAS, CONF_DEVICE: bulb, @@ -1067,12 +1561,12 @@ async 
def test_integration_discovery_with_connection_change( assert ( mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() ) - assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" + assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS2 assert CREDENTIALS_HASH_AES not in mock_config_entry.data assert mock_config_entry.state is ConfigEntryState.LOADED - config.host = "127.0.0.2" + config.host = IP_ADDRESS2 config.uses_http = False # Not passed in to new config class config.http_client = "Foo" config.aes_keys = AES_KEYS @@ -1097,18 +1591,18 @@ async def test_dhcp_discovery_with_ip_change( flows = hass.config_entries.flow.async_progress() assert len(flows) == 0 - assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" + assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS discovery_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp.DhcpServiceInfo( - ip="127.0.0.2", macaddress=DHCP_FORMATTED_MAC_ADDRESS, hostname=ALIAS + ip=IP_ADDRESS2, macaddress=DHCP_FORMATTED_MAC_ADDRESS, hostname=ALIAS ), ) assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" + assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS2 async def test_dhcp_discovery_discover_fail( @@ -1121,14 +1615,14 @@ async def test_dhcp_discovery_discover_fail( flows = hass.config_entries.flow.async_progress() assert len(flows) == 0 - assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" + assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS with override_side_effect(mock_discovery["discover_single"], TimeoutError): discovery_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp.DhcpServiceInfo( - ip="127.0.0.2", macaddress=DHCP_FORMATTED_MAC_ADDRESS, hostname=ALIAS + ip=IP_ADDRESS2, macaddress=DHCP_FORMATTED_MAC_ADDRESS, hostname=ALIAS 
), ) assert discovery_result["type"] is FlowResultType.ABORT @@ -1160,15 +1654,58 @@ async def test_reauth( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + IP_ADDRESS, credentials=credentials, port=None ) - mock_discovery["mock_device"].update.assert_called_once_with() + mock_discovery["mock_devices"][IP_ADDRESS].update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "reauth_successful" await hass.async_block_till_done() +async def test_reauth_camera( + hass: HomeAssistant, + mock_camera_config_entry: MockConfigEntry, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test async_get_image.""" + mock_device = mock_connect["mock_devices"][IP_ADDRESS3] + mock_camera_config_entry.add_to_hass(hass) + mock_camera_config_entry.async_start_reauth( + hass, + config_entries.ConfigFlowContext( + reauth_source=CONF_CAMERA_CREDENTIALS, # type: ignore[typeddict-unknown-key] + ), + {"device": mock_device}, + ) + await hass.async_block_till_done() + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + [result] = flows + + assert result["step_id"] == "camera_auth_confirm" + + with patch( + "homeassistant.components.stream.async_check_stream_client_error", + return_value=None, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser2", + CONF_PASSWORD: "campass2", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert dict(mock_camera_config_entry.data) == { + **CREATE_ENTRY_DATA_AES_CAMERA, + CONF_CAMERA_CREDENTIALS: {CONF_USERNAME: "camuser2", CONF_PASSWORD: "campass2"}, + } + + async def test_reauth_try_connect_all( hass: HomeAssistant, mock_added_config_entry: MockConfigEntry, @@ -1195,7 +1732,7 @@ async def 
test_reauth_try_connect_all( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + IP_ADDRESS, credentials=credentials, port=None ) mock_discovery["try_connect_all"].assert_called_once() assert result2["type"] is FlowResultType.ABORT @@ -1233,7 +1770,7 @@ async def test_reauth_try_connect_all_fail( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + IP_ADDRESS, credentials=credentials, port=None ) mock_discovery["try_connect_all"].assert_called_once() assert result2["errors"] == {"base": "cannot_connect"} @@ -1278,40 +1815,48 @@ async def test_reauth_update_with_encryption_change( assert CONF_CREDENTIALS_HASH not in mock_config_entry.data new_config = DeviceConfig( - "127.0.0.2", + IP_ADDRESS2, credentials=None, connection_type=Device.ConnectionParameters( Device.Family.SmartTapoPlug, Device.EncryptionType.Klap ), uses_http=True, ) - mock_discovery["mock_device"].host = "127.0.0.2" - mock_discovery["mock_device"].config = new_config - mock_discovery["mock_device"].credentials_hash = None - mock_connect["mock_devices"]["127.0.0.2"].config = new_config - mock_connect["mock_devices"]["127.0.0.2"].credentials_hash = CREDENTIALS_HASH_KLAP - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_USERNAME: "fake_username", - CONF_PASSWORD: "fake_password", - }, + mock_device = _mocked_device( + alias="my_device", + ip_address=IP_ADDRESS2, + mac=MAC_ADDRESS2, + device_config=new_config, + credentials_hash=CREDENTIALS_HASH_KLAP, ) - await hass.async_block_till_done(wait_background_tasks=True) + + with ( + override_side_effect( + mock_discovery["discover_single"], lambda *_, **__: mock_device + ), + override_side_effect(mock_connect["connect"], lambda *_, **__: mock_device), + ): + result2 = 
await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) + await hass.async_block_till_done(wait_background_tasks=True) assert "Connection type changed for 127.0.0.2" in caplog.text credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.2", credentials=credentials, port=None + IP_ADDRESS2, credentials=credentials, port=None ) - mock_discovery["mock_device"].update.assert_called_once_with() + mock_device.update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "reauth_successful" assert mock_config_entry.state is ConfigEntryState.LOADED assert ( mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() ) - assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" + assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS2 assert mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_KLAP @@ -1398,7 +1943,7 @@ async def test_reauth_update_from_discovery_with_ip_change( DOMAIN, context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, data={ - CONF_HOST: "127.0.0.2", + CONF_HOST: IP_ADDRESS2, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, CONF_DEVICE: device, @@ -1410,7 +1955,7 @@ async def test_reauth_update_from_discovery_with_ip_change( assert ( mock_config_entry.data[CONF_CONNECTION_PARAMETERS] == CONN_PARAMS_KLAP.to_dict() ) - assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" + assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS2 async def test_reauth_no_update_if_config_and_ip_the_same( @@ -1493,26 +2038,27 @@ async def test_reauth_errors( [result] = flows assert result["step_id"] == "reauth_confirm" - mock_discovery["mock_device"].update.side_effect = error_type - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_USERNAME: "fake_username", - 
CONF_PASSWORD: "fake_password", - }, - ) + mock_device = mock_discovery["mock_devices"][IP_ADDRESS] + with override_side_effect(mock_device.update, error_type): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + IP_ADDRESS, credentials=credentials, port=None ) - mock_discovery["mock_device"].update.assert_called_once_with() + mock_device.update.assert_called_once_with() assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {error_placement: errors_msg} assert result2["description_placeholders"]["error"] == str(error_type) mock_discovery["discover_single"].reset_mock() - mock_discovery["mock_device"].update.reset_mock(side_effect=True) + mock_device.update.reset_mock(side_effect=True) result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], user_input={ @@ -1522,9 +2068,9 @@ async def test_reauth_errors( ) mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + IP_ADDRESS, credentials=credentials, port=None ) - mock_discovery["mock_device"].update.assert_called_once_with() + mock_device.update.assert_called_once_with() assert result3["type"] is FlowResultType.ABORT assert result3["reason"] == "reauth_successful" @@ -1731,12 +2277,169 @@ async def test_reauth_update_other_flows( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials, port=None + IP_ADDRESS, credentials=credentials, port=None ) - mock_discovery["mock_device"].update.assert_called_once_with() + mock_discovery["mock_devices"][IP_ADDRESS].update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT 
assert result2["reason"] == "reauth_successful" await hass.async_block_till_done() flows = hass.config_entries.flow.async_progress() assert len(flows) == 0 + + +async def test_reconfigure( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test reconfigure flow.""" + result = await mock_added_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: IP_ADDRESS, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + +async def test_reconfigure_auth_discovered( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test reconfigure auth flow for device that's discovered.""" + result = await mock_added_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + # Simulate a bad host + with ( + override_side_effect( + mock_discovery["mock_devices"][IP_ADDRESS].update, KasaException + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "WRONG_IP", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + assert result["errors"] == {"base": "cannot_connect"} + assert "error" in result["description_placeholders"] + + with ( + override_side_effect( + mock_discovery["mock_devices"][IP_ADDRESS].update, AuthenticationError + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: IP_ADDRESS, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user_auth_confirm" + + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "user", + CONF_PASSWORD: "pass", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + +async def test_reconfigure_auth_try_connect_all( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test reconfigure auth flow for device that's not discovered.""" + result = await mock_added_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + with ( + override_side_effect(mock_discovery["discover_single"], TimeoutError), + override_side_effect(mock_connect["connect"], KasaException), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: IP_ADDRESS, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user_auth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "user", + CONF_PASSWORD: "pass", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + +async def test_reconfigure_camera( + hass: HomeAssistant, + mock_camera_config_entry: MockConfigEntry, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, +) -> None: + """Test reconfigure flow.""" + mock_camera_config_entry.add_to_hass(hass) + result = await mock_camera_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: IP_ADDRESS3, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "camera_auth_confirm" + + with patch( + 
"homeassistant.components.stream.async_check_stream_client_error", + return_value=None, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_LIVE_VIEW: True, + CONF_USERNAME: "camuser", + CONF_PASSWORD: "campass", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" diff --git a/tests/components/tplink/test_init.py b/tests/components/tplink/test_init.py index dd967e0e0d6..8dad8881b9b 100644 --- a/tests/components/tplink/test_init.py +++ b/tests/components/tplink/test_init.py @@ -59,6 +59,7 @@ from . import ( _patch_discovery, _patch_single_discovery, ) +from .conftest import override_side_effect from tests.common import MockConfigEntry, async_fire_time_changed @@ -70,6 +71,7 @@ async def test_configuring_tplink_causes_discovery( with ( patch("homeassistant.components.tplink.Discover.discover") as discover, patch("homeassistant.components.tplink.Discover.discover_single"), + patch("homeassistant.components.tplink.Device.connect"), ): discover.return_value = {MagicMock(): MagicMock()} await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) @@ -221,8 +223,12 @@ async def test_config_entry_with_stored_credentials( hass.data.setdefault(DOMAIN, {})[CONF_AUTHENTICATION] = auth mock_config_entry.add_to_hass(hass) - with patch( - "homeassistant.components.tplink.async_create_clientsession", return_value="Foo" + with ( + patch( + "homeassistant.components.tplink.async_create_clientsession", + return_value="Foo", + ), + override_side_effect(mock_discovery["discover"], lambda *_, **__: {}), ): await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() From 07322c69925dacf66016250e7963d06bb38e2ba8 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Sun, 22 Dec 2024 19:57:34 +0100 Subject: [PATCH 637/677] Add reconfigure flow to slide_local (#133669) --- 
.../components/slide_local/config_flow.py | 41 ++++++++++++++++++- .../components/slide_local/quality_scale.yaml | 4 +- .../components/slide_local/strings.json | 15 ++++++- .../slide_local/test_config_flow.py | 30 ++++++++++++++ 4 files changed, 86 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/slide_local/config_flow.py b/homeassistant/components/slide_local/config_flow.py index 3ccc89be375..23c509a02dc 100644 --- a/homeassistant/components/slide_local/config_flow.py +++ b/homeassistant/components/slide_local/config_flow.py @@ -103,7 +103,7 @@ class SlideConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the user step.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: if not (errors := await self.async_test_connection(user_input)): await self.async_set_unique_id(self._mac) @@ -136,6 +136,45 @@ class SlideConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the integration.""" + errors: dict[str, str] = {} + + if user_input is not None: + if not (errors := await self.async_test_connection(user_input)): + await self.async_set_unique_id(self._mac) + self._abort_if_unique_id_mismatch( + description_placeholders={CONF_MAC: self._mac} + ) + user_input |= { + CONF_API_VERSION: self._api_version, + } + + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + data_updates=user_input, + ) + + entry: SlideConfigEntry = self._get_reconfigure_entry() + + return self.async_show_form( + step_id="reconfigure", + data_schema=self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Required(CONF_HOST): str, + } + ), + { + CONF_HOST: entry.data[CONF_HOST], + CONF_PASSWORD: entry.data.get(CONF_PASSWORD, ""), + }, + ), + errors=errors, + ) + async def async_step_zeroconf( self, discovery_info: 
ZeroconfServiceInfo ) -> ConfigFlowResult: diff --git a/homeassistant/components/slide_local/quality_scale.yaml b/homeassistant/components/slide_local/quality_scale.yaml index 7a2be591927..54dfd87d98c 100644 --- a/homeassistant/components/slide_local/quality_scale.yaml +++ b/homeassistant/components/slide_local/quality_scale.yaml @@ -50,12 +50,12 @@ rules: diagnostics: done exception-translations: done icon-translations: done - reconfiguration-flow: todo + reconfiguration-flow: done dynamic-devices: status: exempt comment: | Slide_local represents a single physical device, no dynamic changes of devices possible (besides removal of instance itself). - discovery-update-info: todo + discovery-update-info: done repair-issues: todo docs-use-cases: done docs-supported-devices: done diff --git a/homeassistant/components/slide_local/strings.json b/homeassistant/components/slide_local/strings.json index 6aeda9f92fd..b5fe88255a7 100644 --- a/homeassistant/components/slide_local/strings.json +++ b/homeassistant/components/slide_local/strings.json @@ -12,6 +12,17 @@ "password": "The device code of your Slide (inside of the Slide or in the box, length is 8 characters). If your Slide runs firmware version 2 this is optional, as it is not used by the local API." } }, + "reconfigure": { + "description": "Reconfigure the information for your Slide device", + "data": { + "host": "[%key:common::config_flow::data::host%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "[%key:component::slide_local::config::step::user::data_description::host%]", + "password": "[%key:component::slide_local::config::step::user::data_description::password%]" + } + }, "zeroconf_confirm": { "title": "Confirm setup for Slide", "description": "Do you want to setup {host}?" 
@@ -19,7 +30,9 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "discovery_connection_failed": "The setup of the discovered device failed with the following error: {error}. Please try to set it up manually." + "discovery_connection_failed": "The setup of the discovered device failed with the following error: {error}. Please try to set it up manually.", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "unique_id_mismatch": "The mac address of the device ({mac}) does not match the previous mac address." }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", diff --git a/tests/components/slide_local/test_config_flow.py b/tests/components/slide_local/test_config_flow.py index 48be7dd7850..9f2923988ca 100644 --- a/tests/components/slide_local/test_config_flow.py +++ b/tests/components/slide_local/test_config_flow.py @@ -282,6 +282,36 @@ async def test_abort_if_already_setup( assert result["reason"] == "already_configured" +async def test_reconfigure( + hass: HomeAssistant, + mock_slide_api: AsyncMock, + mock_config_entry: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test reconfigure flow options.""" + + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "127.0.0.3", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert len(mock_setup_entry.mock_calls) == 1 + + entry = hass.config_entries.async_get_entry(mock_config_entry.entry_id) + assert entry + assert entry.data[CONF_HOST] == "127.0.0.3" + + async def test_zeroconf( hass: HomeAssistant, mock_slide_api: AsyncMock, mock_setup_entry: AsyncMock ) -> None: From 
0ad9af0febb080da2698e3951383e837d896627b Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Sun, 22 Dec 2024 20:23:55 +0100 Subject: [PATCH 638/677] Add already exists config flow tests for Ecovacs (#133572) Co-authored-by: Michael <35783820+mib1185@users.noreply.github.com> Co-authored-by: Franck Nijhof --- tests/components/ecovacs/test_config_flow.py | 120 +++++++++++-------- 1 file changed, 72 insertions(+), 48 deletions(-) diff --git a/tests/components/ecovacs/test_config_flow.py b/tests/components/ecovacs/test_config_flow.py index 5bf1144db0b..3a0cb188146 100644 --- a/tests/components/ecovacs/test_config_flow.py +++ b/tests/components/ecovacs/test_config_flow.py @@ -1,6 +1,7 @@ """Test Ecovacs config flow.""" from collections.abc import Awaitable, Callable +from dataclasses import dataclass, field import ssl from typing import Any from unittest.mock import AsyncMock, Mock, patch @@ -28,15 +29,20 @@ from .const import ( VALID_ENTRY_DATA_SELF_HOSTED_WITH_VALIDATE_CERT, ) +from tests.common import MockConfigEntry + _USER_STEP_SELF_HOSTED = {CONF_MODE: InstanceMode.SELF_HOSTED} -_TEST_FN_AUTH_ARG = "user_input_auth" -_TEST_FN_USER_ARG = "user_input_user" + +@dataclass +class _TestFnUserInput: + auth: dict[str, Any] + user: dict[str, Any] = field(default_factory=dict) async def _test_user_flow( hass: HomeAssistant, - user_input_auth: dict[str, Any], + user_input: _TestFnUserInput, ) -> dict[str, Any]: """Test config flow.""" result = await hass.config_entries.flow.async_init( @@ -50,15 +56,13 @@ async def _test_user_flow( return await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=user_input_auth, + user_input=user_input.auth, ) async def _test_user_flow_show_advanced_options( hass: HomeAssistant, - *, - user_input_auth: dict[str, Any], - user_input_user: dict[str, Any] | None = None, + user_input: _TestFnUserInput, ) -> dict[str, Any]: """Test config flow.""" result = await hass.config_entries.flow.async_init( @@ -72,7 +76,7 @@ async def 
_test_user_flow_show_advanced_options( result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=user_input_user or {}, + user_input=user_input.user, ) assert result["type"] is FlowResultType.FORM @@ -81,29 +85,26 @@ async def _test_user_flow_show_advanced_options( return await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=user_input_auth, + user_input=user_input.auth, ) @pytest.mark.parametrize( - ("test_fn", "test_fn_args", "entry_data"), + ("test_fn", "test_fn_user_input", "entry_data"), [ ( _test_user_flow_show_advanced_options, - {_TEST_FN_AUTH_ARG: VALID_ENTRY_DATA_CLOUD}, + _TestFnUserInput(VALID_ENTRY_DATA_CLOUD), VALID_ENTRY_DATA_CLOUD, ), ( _test_user_flow_show_advanced_options, - { - _TEST_FN_AUTH_ARG: VALID_ENTRY_DATA_SELF_HOSTED, - _TEST_FN_USER_ARG: _USER_STEP_SELF_HOSTED, - }, + _TestFnUserInput(VALID_ENTRY_DATA_SELF_HOSTED, _USER_STEP_SELF_HOSTED), VALID_ENTRY_DATA_SELF_HOSTED, ), ( _test_user_flow, - {_TEST_FN_AUTH_ARG: VALID_ENTRY_DATA_CLOUD}, + _TestFnUserInput(VALID_ENTRY_DATA_CLOUD), VALID_ENTRY_DATA_CLOUD, ), ], @@ -114,18 +115,12 @@ async def test_user_flow( mock_setup_entry: AsyncMock, mock_authenticator_authenticate: AsyncMock, mock_mqtt_client: Mock, - test_fn: Callable[[HomeAssistant, dict[str, Any]], Awaitable[dict[str, Any]]] - | Callable[ - [HomeAssistant, dict[str, Any], dict[str, Any]], Awaitable[dict[str, Any]] - ], - test_fn_args: dict[str, Any], + test_fn: Callable[[HomeAssistant, _TestFnUserInput], Awaitable[dict[str, Any]]], + test_fn_user_input: _TestFnUserInput, entry_data: dict[str, Any], ) -> None: """Test the user config flow.""" - result = await test_fn( - hass, - **test_fn_args, - ) + result = await test_fn(hass, test_fn_user_input) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == entry_data[CONF_USERNAME] assert result["data"] == entry_data @@ -161,24 +156,21 @@ def _cannot_connect_error(user_input: dict[str, Any]) -> str: 
ids=["cannot_connect", "invalid_auth", "unknown"], ) @pytest.mark.parametrize( - ("test_fn", "test_fn_args", "entry_data"), + ("test_fn", "test_fn_user_input", "entry_data"), [ ( _test_user_flow_show_advanced_options, - {_TEST_FN_AUTH_ARG: VALID_ENTRY_DATA_CLOUD}, + _TestFnUserInput(VALID_ENTRY_DATA_CLOUD), VALID_ENTRY_DATA_CLOUD, ), ( _test_user_flow_show_advanced_options, - { - _TEST_FN_AUTH_ARG: VALID_ENTRY_DATA_SELF_HOSTED, - _TEST_FN_USER_ARG: _USER_STEP_SELF_HOSTED, - }, + _TestFnUserInput(VALID_ENTRY_DATA_SELF_HOSTED, _USER_STEP_SELF_HOSTED), VALID_ENTRY_DATA_SELF_HOSTED_WITH_VALIDATE_CERT, ), ( _test_user_flow, - {_TEST_FN_AUTH_ARG: VALID_ENTRY_DATA_CLOUD}, + _TestFnUserInput(VALID_ENTRY_DATA_CLOUD), VALID_ENTRY_DATA_CLOUD, ), ], @@ -193,22 +185,16 @@ async def test_user_flow_raise_error( reason_rest: str, side_effect_mqtt: Exception, errors_mqtt: Callable[[dict[str, Any]], str], - test_fn: Callable[[HomeAssistant, dict[str, Any]], Awaitable[dict[str, Any]]] - | Callable[ - [HomeAssistant, dict[str, Any], dict[str, Any]], Awaitable[dict[str, Any]] - ], - test_fn_args: dict[str, Any], + test_fn: Callable[[HomeAssistant, _TestFnUserInput], Awaitable[dict[str, Any]]], + test_fn_user_input: _TestFnUserInput, entry_data: dict[str, Any], ) -> None: """Test handling error on library calls.""" - user_input_auth = test_fn_args[_TEST_FN_AUTH_ARG] + user_input_auth = test_fn_user_input.auth # Authenticator raises error mock_authenticator_authenticate.side_effect = side_effect_rest - result = await test_fn( - hass, - **test_fn_args, - ) + result = await test_fn(hass, test_fn_user_input) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "auth" assert result["errors"] == {"base": reason_rest} @@ -256,12 +242,14 @@ async def test_user_flow_self_hosted_error( result = await _test_user_flow_show_advanced_options( hass, - user_input_auth=VALID_ENTRY_DATA_SELF_HOSTED - | { - CONF_OVERRIDE_REST_URL: "bla://localhost:8000", - CONF_OVERRIDE_MQTT_URL: 
"mqtt://", - }, - user_input_user=_USER_STEP_SELF_HOSTED, + _TestFnUserInput( + VALID_ENTRY_DATA_SELF_HOSTED + | { + CONF_OVERRIDE_REST_URL: "bla://localhost:8000", + CONF_OVERRIDE_MQTT_URL: "mqtt://", + }, + _USER_STEP_SELF_HOSTED, + ), ) assert result["type"] is FlowResultType.FORM @@ -298,3 +286,39 @@ async def test_user_flow_self_hosted_error( mock_setup_entry.assert_called() mock_authenticator_authenticate.assert_called() mock_mqtt_client.verify_config.assert_called() + + +@pytest.mark.parametrize( + ("test_fn", "test_fn_user_input"), + [ + ( + _test_user_flow_show_advanced_options, + _TestFnUserInput(VALID_ENTRY_DATA_CLOUD), + ), + ( + _test_user_flow_show_advanced_options, + _TestFnUserInput(VALID_ENTRY_DATA_SELF_HOSTED, _USER_STEP_SELF_HOSTED), + ), + ( + _test_user_flow, + _TestFnUserInput(VALID_ENTRY_DATA_CLOUD), + ), + ], + ids=["advanced_cloud", "advanced_self_hosted", "cloud"], +) +async def test_already_exists( + hass: HomeAssistant, + test_fn: Callable[[HomeAssistant, _TestFnUserInput], Awaitable[dict[str, Any]]], + test_fn_user_input: _TestFnUserInput, +) -> None: + """Test we don't allow duplicated config entries.""" + MockConfigEntry(domain=DOMAIN, data=test_fn_user_input.auth).add_to_hass(hass) + + result = await test_fn( + hass, + test_fn_user_input, + ) + + assert result + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" From 26180486e75186ddec03130088bd8b405814126b Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Sun, 22 Dec 2024 15:05:07 -0500 Subject: [PATCH 639/677] Add media browsing to Cambridge Audio (#129106) * Add media browsing to Cambridge Audio * Remove one folder logic * Remove class mapping for presets --- .../cambridge_audio/media_browser.py | 85 +++++++++++++++++++ .../cambridge_audio/media_player.py | 16 +++- .../fixtures/get_presets_list.json | 2 +- .../snapshots/test_diagnostics.ambr | 2 +- .../snapshots/test_media_browser.ambr | 39 
+++++++++ .../cambridge_audio/test_media_browser.py | 61 +++++++++++++ 6 files changed, 201 insertions(+), 4 deletions(-) create mode 100644 homeassistant/components/cambridge_audio/media_browser.py create mode 100644 tests/components/cambridge_audio/snapshots/test_media_browser.ambr create mode 100644 tests/components/cambridge_audio/test_media_browser.py diff --git a/homeassistant/components/cambridge_audio/media_browser.py b/homeassistant/components/cambridge_audio/media_browser.py new file mode 100644 index 00000000000..efe55ee792e --- /dev/null +++ b/homeassistant/components/cambridge_audio/media_browser.py @@ -0,0 +1,85 @@ +"""Support for media browsing.""" + +from aiostreammagic import StreamMagicClient +from aiostreammagic.models import Preset + +from homeassistant.components.media_player import BrowseMedia, MediaClass +from homeassistant.core import HomeAssistant + + +async def async_browse_media( + hass: HomeAssistant, + client: StreamMagicClient, + media_content_id: str | None, + media_content_type: str | None, +) -> BrowseMedia: + """Browse media.""" + + if media_content_type == "presets": + return await _presets_payload(client.preset_list.presets) + + return await _root_payload( + hass, + client, + ) + + +async def _root_payload( + hass: HomeAssistant, + client: StreamMagicClient, +) -> BrowseMedia: + """Return root payload for Cambridge Audio.""" + children: list[BrowseMedia] = [] + + if client.preset_list.presets: + children.append( + BrowseMedia( + title="Presets", + media_class=MediaClass.DIRECTORY, + media_content_id="", + media_content_type="presets", + thumbnail="https://brands.home-assistant.io/_/cambridge_audio/logo.png", + can_play=False, + can_expand=True, + ) + ) + + return BrowseMedia( + title="Cambridge Audio", + media_class=MediaClass.DIRECTORY, + media_content_id="", + media_content_type="root", + can_play=False, + can_expand=True, + children=children, + ) + + +async def _presets_payload(presets: list[Preset]) -> BrowseMedia: + 
"""Create payload to list presets.""" + + children: list[BrowseMedia] = [] + for preset in presets: + if preset.state != "OK": + continue + children.append( + BrowseMedia( + title=preset.name, + media_class=MediaClass.MUSIC, + media_content_id=str(preset.preset_id), + media_content_type="preset", + can_play=True, + can_expand=False, + thumbnail=preset.art_url, + ) + ) + + return BrowseMedia( + title="Presets", + media_class=MediaClass.DIRECTORY, + media_content_id="", + media_content_type="presets", + can_play=False, + can_expand=True, + children=children, + ) diff --git a/homeassistant/components/cambridge_audio/media_player.py b/homeassistant/components/cambridge_audio/media_player.py index 9896effb07d..042178d5781 100644 --- a/homeassistant/components/cambridge_audio/media_player.py +++ b/homeassistant/components/cambridge_audio/media_player.py @@ -13,6 +13,7 @@ from aiostreammagic import ( ) from homeassistant.components.media_player import ( + BrowseMedia, MediaPlayerDeviceClass, MediaPlayerEntity, MediaPlayerEntityFeature, @@ -24,7 +25,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import CambridgeAudioConfigEntry +from . 
import CambridgeAudioConfigEntry, media_browser from .const import ( CAMBRIDGE_MEDIA_TYPE_AIRABLE, CAMBRIDGE_MEDIA_TYPE_INTERNET_RADIO, @@ -34,7 +35,8 @@ from .const import ( from .entity import CambridgeAudioEntity, command BASE_FEATURES = ( - MediaPlayerEntityFeature.SELECT_SOURCE + MediaPlayerEntityFeature.BROWSE_MEDIA + | MediaPlayerEntityFeature.SELECT_SOURCE | MediaPlayerEntityFeature.TURN_OFF | MediaPlayerEntityFeature.TURN_ON | MediaPlayerEntityFeature.PLAY_MEDIA @@ -338,3 +340,13 @@ class CambridgeAudioDevice(CambridgeAudioEntity, MediaPlayerEntity): if media_type == CAMBRIDGE_MEDIA_TYPE_INTERNET_RADIO: await self.client.play_radio_url("Radio", media_id) + + async def async_browse_media( + self, + media_content_type: MediaType | str | None = None, + media_content_id: str | None = None, + ) -> BrowseMedia: + """Implement the media browsing helper.""" + return await media_browser.async_browse_media( + self.hass, self.client, media_content_id, media_content_type + ) diff --git a/tests/components/cambridge_audio/fixtures/get_presets_list.json b/tests/components/cambridge_audio/fixtures/get_presets_list.json index 87d49e9fd30..6443b7dfbcf 100644 --- a/tests/components/cambridge_audio/fixtures/get_presets_list.json +++ b/tests/components/cambridge_audio/fixtures/get_presets_list.json @@ -28,7 +28,7 @@ "name": "Unknown Preset Type", "type": "Unknown", "class": "stream.unknown", - "state": "OK" + "state": "UNAVAILABLE" } ] } diff --git a/tests/components/cambridge_audio/snapshots/test_diagnostics.ambr b/tests/components/cambridge_audio/snapshots/test_diagnostics.ambr index 1ba9c4093f6..8de3ccea746 100644 --- a/tests/components/cambridge_audio/snapshots/test_diagnostics.ambr +++ b/tests/components/cambridge_audio/snapshots/test_diagnostics.ambr @@ -78,7 +78,7 @@ 'name': 'Unknown Preset Type', 'preset_class': 'stream.unknown', 'preset_id': 3, - 'state': 'OK', + 'state': 'UNAVAILABLE', 'type': 'Unknown', }), ]), diff --git 
a/tests/components/cambridge_audio/snapshots/test_media_browser.ambr b/tests/components/cambridge_audio/snapshots/test_media_browser.ambr new file mode 100644 index 00000000000..180d5ed1bb0 --- /dev/null +++ b/tests/components/cambridge_audio/snapshots/test_media_browser.ambr @@ -0,0 +1,39 @@ +# serializer version: 1 +# name: test_browse_media_root + list([ + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': None, + 'media_class': 'directory', + 'media_content_id': '', + 'media_content_type': 'presets', + 'thumbnail': 'https://brands.home-assistant.io/_/cambridge_audio/logo.png', + 'title': 'Presets', + }), + ]) +# --- +# name: test_browse_presets + list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': 'music', + 'media_content_id': '1', + 'media_content_type': 'preset', + 'thumbnail': 'https://static.airable.io/43/68/432868.png', + 'title': 'Chicago House Radio', + }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': 'music', + 'media_content_id': '2', + 'media_content_type': 'preset', + 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b27325a5a1ed28871e8e53e62d59', + 'title': 'Spotify: Good & Evil', + }), + ]) +# --- diff --git a/tests/components/cambridge_audio/test_media_browser.py b/tests/components/cambridge_audio/test_media_browser.py new file mode 100644 index 00000000000..da72cfab534 --- /dev/null +++ b/tests/components/cambridge_audio/test_media_browser.py @@ -0,0 +1,61 @@ +"""Tests for the Cambridge Audio media browser.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . 
import setup_integration +from .const import ENTITY_ID + +from tests.common import MockConfigEntry +from tests.typing import WebSocketGenerator + + +async def test_browse_media_root( + hass: HomeAssistant, + mock_stream_magic_client: AsyncMock, + mock_config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test the root browse page.""" + await setup_integration(hass, mock_config_entry) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": ENTITY_ID, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"]["children"] == snapshot + + +async def test_browse_presets( + hass: HomeAssistant, + mock_stream_magic_client: AsyncMock, + mock_config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test the presets browse page.""" + await setup_integration(hass, mock_config_entry) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": ENTITY_ID, + "media_content_type": "presets", + "media_content_id": "", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"]["children"] == snapshot From 368e958457a192a03fc1202f1e360659ef8c2afe Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 22 Dec 2024 21:10:12 +0100 Subject: [PATCH 640/677] Load data for multiple days in Nord Pool (#133371) * Load data for multiple days in Nord Pool * Fix current day * Fix tests * Fix services * Fix fixtures * Mod get_data_current_day * Mods * simplify further --- .../components/nordpool/coordinator.py | 31 +- homeassistant/components/nordpool/sensor.py | 86 +- tests/components/nordpool/conftest.py | 93 +- ...period.json => delivery_period_today.json} | 0 .../fixtures/delivery_period_tomorrow.json | 272 +++++ 
.../fixtures/delivery_period_yesterday.json | 272 +++++ .../nordpool/snapshots/test_diagnostics.ambr | 1048 +++++++++++++---- tests/components/nordpool/test_config_flow.py | 80 +- tests/components/nordpool/test_coordinator.py | 57 +- tests/components/nordpool/test_diagnostics.py | 6 +- tests/components/nordpool/test_init.py | 15 +- tests/components/nordpool/test_sensor.py | 15 +- tests/components/nordpool/test_services.py | 69 +- 13 files changed, 1582 insertions(+), 462 deletions(-) rename tests/components/nordpool/fixtures/{delivery_period.json => delivery_period_today.json} (100%) create mode 100644 tests/components/nordpool/fixtures/delivery_period_tomorrow.json create mode 100644 tests/components/nordpool/fixtures/delivery_period_yesterday.json diff --git a/homeassistant/components/nordpool/coordinator.py b/homeassistant/components/nordpool/coordinator.py index e6b36f7deee..0c9a7e9f337 100644 --- a/homeassistant/components/nordpool/coordinator.py +++ b/homeassistant/components/nordpool/coordinator.py @@ -10,6 +10,8 @@ from typing import TYPE_CHECKING from pynordpool import ( Currency, DeliveryPeriodData, + DeliveryPeriodEntry, + DeliveryPeriodsData, NordPoolClient, NordPoolEmptyResponseError, NordPoolError, @@ -29,7 +31,7 @@ if TYPE_CHECKING: from . 
import NordPoolConfigEntry -class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodData]): +class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]): """A Nord Pool Data Update Coordinator.""" config_entry: NordPoolConfigEntry @@ -74,12 +76,16 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodData]): if data: self.async_set_updated_data(data) - async def api_call(self, retry: int = 3) -> DeliveryPeriodData | None: + async def api_call(self, retry: int = 3) -> DeliveryPeriodsData | None: """Make api call to retrieve data with retry if failure.""" data = None try: - data = await self.client.async_get_delivery_period( - dt_util.now(), + data = await self.client.async_get_delivery_periods( + [ + dt_util.now() - timedelta(days=1), + dt_util.now(), + dt_util.now() + timedelta(days=1), + ], Currency(self.config_entry.data[CONF_CURRENCY]), self.config_entry.data[CONF_AREAS], ) @@ -97,3 +103,20 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodData]): self.async_set_update_error(error) return data + + def merge_price_entries(self) -> list[DeliveryPeriodEntry]: + """Return the merged price entries.""" + merged_entries: list[DeliveryPeriodEntry] = [] + for del_period in self.data.entries: + merged_entries.extend(del_period.entries) + return merged_entries + + def get_data_current_day(self) -> DeliveryPeriodData: + """Return the current day data.""" + current_day = dt_util.utcnow().strftime("%Y-%m-%d") + delivery_period: DeliveryPeriodData = self.data.entries[0] + for del_period in self.data.entries: + if del_period.requested_date == current_day: + delivery_period = del_period + break + return delivery_period diff --git a/homeassistant/components/nordpool/sensor.py b/homeassistant/components/nordpool/sensor.py index fe966e99168..30910f8e5f6 100644 --- a/homeassistant/components/nordpool/sensor.py +++ b/homeassistant/components/nordpool/sensor.py @@ -6,8 +6,6 @@ from collections.abc import 
Callable from dataclasses import dataclass from datetime import datetime, timedelta -from pynordpool import DeliveryPeriodData - from homeassistant.components.sensor import ( EntityCategory, SensorDeviceClass, @@ -29,34 +27,34 @@ PARALLEL_UPDATES = 0 def validate_prices( func: Callable[ - [DeliveryPeriodData], dict[str, tuple[float | None, float, float | None]] + [NordpoolPriceSensor], dict[str, tuple[float | None, float, float | None]] ], - data: DeliveryPeriodData, + entity: NordpoolPriceSensor, area: str, index: int, ) -> float | None: """Validate and return.""" - if result := func(data)[area][index]: + if result := func(entity)[area][index]: return result / 1000 return None def get_prices( - data: DeliveryPeriodData, + entity: NordpoolPriceSensor, ) -> dict[str, tuple[float | None, float, float | None]]: """Return previous, current and next prices. Output: {"SE3": (10.0, 10.5, 12.1)} """ + data = entity.coordinator.merge_price_entries() last_price_entries: dict[str, float] = {} current_price_entries: dict[str, float] = {} next_price_entries: dict[str, float] = {} current_time = dt_util.utcnow() previous_time = current_time - timedelta(hours=1) next_time = current_time + timedelta(hours=1) - price_data = data.entries - LOGGER.debug("Price data: %s", price_data) - for entry in price_data: + LOGGER.debug("Price data: %s", data) + for entry in data: if entry.start <= current_time <= entry.end: current_price_entries = entry.entry if entry.start <= previous_time <= entry.end: @@ -82,11 +80,12 @@ def get_prices( def get_min_max_price( - data: DeliveryPeriodData, - area: str, + entity: NordpoolPriceSensor, func: Callable[[float, float], float], ) -> tuple[float, datetime, datetime]: """Get the lowest price from the data.""" + data = entity.coordinator.get_data_current_day() + area = entity.area price_data = data.entries price: float = price_data[0].entry[area] start: datetime = price_data[0].start @@ -102,12 +101,13 @@ def get_min_max_price( def get_blockprices( - 
data: DeliveryPeriodData, + entity: NordpoolBlockPriceSensor, ) -> dict[str, dict[str, tuple[datetime, datetime, float, float, float]]]: """Return average, min and max for block prices. Output: {"SE3": {"Off-peak 1": (_datetime_, _datetime_, 9.3, 10.5, 12.1)}} """ + data = entity.coordinator.get_data_current_day() result: dict[str, dict[str, tuple[datetime, datetime, float, float, float]]] = {} block_prices = data.block_prices for entry in block_prices: @@ -130,15 +130,15 @@ def get_blockprices( class NordpoolDefaultSensorEntityDescription(SensorEntityDescription): """Describes Nord Pool default sensor entity.""" - value_fn: Callable[[DeliveryPeriodData], str | float | datetime | None] + value_fn: Callable[[NordpoolSensor], str | float | datetime | None] @dataclass(frozen=True, kw_only=True) class NordpoolPricesSensorEntityDescription(SensorEntityDescription): """Describes Nord Pool prices sensor entity.""" - value_fn: Callable[[DeliveryPeriodData, str], float | None] - extra_fn: Callable[[DeliveryPeriodData, str], dict[str, str] | None] + value_fn: Callable[[NordpoolPriceSensor], float | None] + extra_fn: Callable[[NordpoolPriceSensor], dict[str, str] | None] @dataclass(frozen=True, kw_only=True) @@ -155,19 +155,19 @@ DEFAULT_SENSOR_TYPES: tuple[NordpoolDefaultSensorEntityDescription, ...] 
= ( key="updated_at", translation_key="updated_at", device_class=SensorDeviceClass.TIMESTAMP, - value_fn=lambda data: data.updated_at, + value_fn=lambda entity: entity.coordinator.get_data_current_day().updated_at, entity_category=EntityCategory.DIAGNOSTIC, ), NordpoolDefaultSensorEntityDescription( key="currency", translation_key="currency", - value_fn=lambda data: data.currency, + value_fn=lambda entity: entity.coordinator.get_data_current_day().currency, entity_category=EntityCategory.DIAGNOSTIC, ), NordpoolDefaultSensorEntityDescription( key="exchange_rate", translation_key="exchange_rate", - value_fn=lambda data: data.exchange_rate, + value_fn=lambda entity: entity.coordinator.get_data_current_day().exchange_rate, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, entity_category=EntityCategory.DIAGNOSTIC, @@ -177,42 +177,42 @@ PRICES_SENSOR_TYPES: tuple[NordpoolPricesSensorEntityDescription, ...] = ( NordpoolPricesSensorEntityDescription( key="current_price", translation_key="current_price", - value_fn=lambda data, area: validate_prices(get_prices, data, area, 1), - extra_fn=lambda data, area: None, + value_fn=lambda entity: validate_prices(get_prices, entity, entity.area, 1), + extra_fn=lambda entity: None, state_class=SensorStateClass.MEASUREMENT, suggested_display_precision=2, ), NordpoolPricesSensorEntityDescription( key="last_price", translation_key="last_price", - value_fn=lambda data, area: validate_prices(get_prices, data, area, 0), - extra_fn=lambda data, area: None, + value_fn=lambda entity: validate_prices(get_prices, entity, entity.area, 0), + extra_fn=lambda entity: None, suggested_display_precision=2, ), NordpoolPricesSensorEntityDescription( key="next_price", translation_key="next_price", - value_fn=lambda data, area: validate_prices(get_prices, data, area, 2), - extra_fn=lambda data, area: None, + value_fn=lambda entity: validate_prices(get_prices, entity, entity.area, 2), + extra_fn=lambda entity: None, 
suggested_display_precision=2, ), NordpoolPricesSensorEntityDescription( key="lowest_price", translation_key="lowest_price", - value_fn=lambda data, area: get_min_max_price(data, area, min)[0] / 1000, - extra_fn=lambda data, area: { - "start": get_min_max_price(data, area, min)[1].isoformat(), - "end": get_min_max_price(data, area, min)[2].isoformat(), + value_fn=lambda entity: get_min_max_price(entity, min)[0] / 1000, + extra_fn=lambda entity: { + "start": get_min_max_price(entity, min)[1].isoformat(), + "end": get_min_max_price(entity, min)[2].isoformat(), }, suggested_display_precision=2, ), NordpoolPricesSensorEntityDescription( key="highest_price", translation_key="highest_price", - value_fn=lambda data, area: get_min_max_price(data, area, max)[0] / 1000, - extra_fn=lambda data, area: { - "start": get_min_max_price(data, area, max)[1].isoformat(), - "end": get_min_max_price(data, area, max)[2].isoformat(), + value_fn=lambda entity: get_min_max_price(entity, max)[0] / 1000, + extra_fn=lambda entity: { + "start": get_min_max_price(entity, max)[1].isoformat(), + "end": get_min_max_price(entity, max)[2].isoformat(), }, suggested_display_precision=2, ), @@ -276,11 +276,12 @@ async def async_setup_entry( """Set up Nord Pool sensor platform.""" coordinator = entry.runtime_data + current_day_data = entry.runtime_data.get_data_current_day() entities: list[NordpoolBaseEntity] = [] - currency = entry.runtime_data.data.currency + currency = current_day_data.currency - for area in get_prices(entry.runtime_data.data): + for area in current_day_data.area_average: LOGGER.debug("Setting up base sensors for area %s", area) entities.extend( NordpoolSensor(coordinator, description, area) @@ -297,16 +298,16 @@ async def async_setup_entry( NordpoolDailyAveragePriceSensor(coordinator, description, area, currency) for description in DAILY_AVERAGE_PRICES_SENSOR_TYPES ) - for block_name in get_blockprices(coordinator.data)[area]: + for block_prices in 
entry.runtime_data.get_data_current_day().block_prices: LOGGER.debug( "Setting up block price sensors for area %s with currency %s in block %s", area, currency, - block_name, + block_prices.name, ) entities.extend( NordpoolBlockPriceSensor( - coordinator, description, area, currency, block_name + coordinator, description, area, currency, block_prices.name ) for description in BLOCK_PRICES_SENSOR_TYPES ) @@ -321,7 +322,7 @@ class NordpoolSensor(NordpoolBaseEntity, SensorEntity): @property def native_value(self) -> str | float | datetime | None: """Return value of sensor.""" - return self.entity_description.value_fn(self.coordinator.data) + return self.entity_description.value_fn(self) class NordpoolPriceSensor(NordpoolBaseEntity, SensorEntity): @@ -343,12 +344,12 @@ class NordpoolPriceSensor(NordpoolBaseEntity, SensorEntity): @property def native_value(self) -> float | None: """Return value of sensor.""" - return self.entity_description.value_fn(self.coordinator.data, self.area) + return self.entity_description.value_fn(self) @property def extra_state_attributes(self) -> dict[str, str] | None: """Return the extra state attributes.""" - return self.entity_description.extra_fn(self.coordinator.data, self.area) + return self.entity_description.extra_fn(self) class NordpoolBlockPriceSensor(NordpoolBaseEntity, SensorEntity): @@ -376,7 +377,7 @@ class NordpoolBlockPriceSensor(NordpoolBaseEntity, SensorEntity): def native_value(self) -> float | datetime | None: """Return value of sensor.""" return self.entity_description.value_fn( - get_blockprices(self.coordinator.data)[self.area][self.block_name] + get_blockprices(self)[self.area][self.block_name] ) @@ -399,4 +400,5 @@ class NordpoolDailyAveragePriceSensor(NordpoolBaseEntity, SensorEntity): @property def native_value(self) -> float | None: """Return value of sensor.""" - return self.coordinator.data.area_average[self.area] / 1000 + data = self.coordinator.get_data_current_day() + return data.area_average[self.area] / 
1000 diff --git a/tests/components/nordpool/conftest.py b/tests/components/nordpool/conftest.py index 9b7ab4b2afa..1c26c7f84eb 100644 --- a/tests/components/nordpool/conftest.py +++ b/tests/components/nordpool/conftest.py @@ -3,20 +3,16 @@ from __future__ import annotations from collections.abc import AsyncGenerator -from datetime import datetime import json from typing import Any from unittest.mock import patch -from pynordpool import NordPoolClient -from pynordpool.const import Currency -from pynordpool.model import DeliveryPeriodData +from pynordpool import API, NordPoolClient import pytest from homeassistant.components.nordpool.const import DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util from . import ENTRY_CONFIG @@ -32,9 +28,7 @@ async def no_sleep() -> AsyncGenerator[None]: @pytest.fixture -async def load_int( - hass: HomeAssistant, get_data: DeliveryPeriodData -) -> MockConfigEntry: +async def load_int(hass: HomeAssistant, get_client: NordPoolClient) -> MockConfigEntry: """Set up the Nord Pool integration in Home Assistant.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -44,40 +38,83 @@ async def load_int( config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - ): - await hass.config_entries.async_setup(config_entry.entry_id) + await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() return config_entry -@pytest.fixture(name="get_data") +@pytest.fixture(name="get_client") async def get_data_from_library( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, load_json: dict[str, Any] -) -> DeliveryPeriodData: + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + load_json: list[dict[str, Any]], +) -> AsyncGenerator[NordPoolClient]: """Retrieve data from Nord Pool library.""" - + 
aioclient_mock.request( + "GET", + url=API + "/DayAheadPrices", + params={ + "date": "2024-11-05", + "market": "DayAhead", + "deliveryArea": "SE3,SE4", + "currency": "SEK", + }, + json=load_json[0], + ) + aioclient_mock.request( + "GET", + url=API + "/DayAheadPrices", + params={ + "date": "2024-11-05", + "market": "DayAhead", + "deliveryArea": "SE3", + "currency": "EUR", + }, + json=load_json[0], + ) + aioclient_mock.request( + "GET", + url=API + "/DayAheadPrices", + params={ + "date": "2024-11-04", + "market": "DayAhead", + "deliveryArea": "SE3,SE4", + "currency": "SEK", + }, + json=load_json[1], + ) + aioclient_mock.request( + "GET", + url=API + "/DayAheadPrices", + params={ + "date": "2024-11-06", + "market": "DayAhead", + "deliveryArea": "SE3,SE4", + "currency": "SEK", + }, + json=load_json[2], + ) client = NordPoolClient(aioclient_mock.create_session(hass.loop)) - with patch("pynordpool.NordPoolClient._get", return_value=load_json): - output = await client.async_get_delivery_period( - datetime(2024, 11, 5, 13, tzinfo=dt_util.UTC), Currency.SEK, ["SE3", "SE4"] - ) + yield client await client._session.close() - return output @pytest.fixture(name="load_json") -def load_json_from_fixture(load_data: str) -> dict[str, Any]: +def load_json_from_fixture(load_data: list[str, str, str]) -> list[dict[str, Any]]: """Load fixture with json data and return.""" - return json.loads(load_data) + return [ + json.loads(load_data[0]), + json.loads(load_data[1]), + json.loads(load_data[2]), + ] @pytest.fixture(name="load_data", scope="package") -def load_data_from_fixture() -> str: +def load_data_from_fixture() -> list[str, str, str]: """Load fixture with fixture data and return.""" - return load_fixture("delivery_period.json", DOMAIN) + return [ + load_fixture("delivery_period_today.json", DOMAIN), + load_fixture("delivery_period_yesterday.json", DOMAIN), + load_fixture("delivery_period_tomorrow.json", DOMAIN), + ] diff --git 
a/tests/components/nordpool/fixtures/delivery_period.json b/tests/components/nordpool/fixtures/delivery_period_today.json similarity index 100% rename from tests/components/nordpool/fixtures/delivery_period.json rename to tests/components/nordpool/fixtures/delivery_period_today.json diff --git a/tests/components/nordpool/fixtures/delivery_period_tomorrow.json b/tests/components/nordpool/fixtures/delivery_period_tomorrow.json new file mode 100644 index 00000000000..abaa24e93ed --- /dev/null +++ b/tests/components/nordpool/fixtures/delivery_period_tomorrow.json @@ -0,0 +1,272 @@ +{ + "deliveryDateCET": "2024-11-06", + "version": 3, + "updatedAt": "2024-11-05T12:12:51.9853434Z", + "deliveryAreas": ["SE3", "SE4"], + "market": "DayAhead", + "multiAreaEntries": [ + { + "deliveryStart": "2024-11-05T23:00:00Z", + "deliveryEnd": "2024-11-06T00:00:00Z", + "entryPerArea": { + "SE3": 126.66, + "SE4": 275.6 + } + }, + { + "deliveryStart": "2024-11-06T00:00:00Z", + "deliveryEnd": "2024-11-06T01:00:00Z", + "entryPerArea": { + "SE3": 74.06, + "SE4": 157.34 + } + }, + { + "deliveryStart": "2024-11-06T01:00:00Z", + "deliveryEnd": "2024-11-06T02:00:00Z", + "entryPerArea": { + "SE3": 78.38, + "SE4": 165.62 + } + }, + { + "deliveryStart": "2024-11-06T02:00:00Z", + "deliveryEnd": "2024-11-06T03:00:00Z", + "entryPerArea": { + "SE3": 92.37, + "SE4": 196.17 + } + }, + { + "deliveryStart": "2024-11-06T03:00:00Z", + "deliveryEnd": "2024-11-06T04:00:00Z", + "entryPerArea": { + "SE3": 99.14, + "SE4": 190.58 + } + }, + { + "deliveryStart": "2024-11-06T04:00:00Z", + "deliveryEnd": "2024-11-06T05:00:00Z", + "entryPerArea": { + "SE3": 447.51, + "SE4": 932.93 + } + }, + { + "deliveryStart": "2024-11-06T05:00:00Z", + "deliveryEnd": "2024-11-06T06:00:00Z", + "entryPerArea": { + "SE3": 641.47, + "SE4": 1284.69 + } + }, + { + "deliveryStart": "2024-11-06T06:00:00Z", + "deliveryEnd": "2024-11-06T07:00:00Z", + "entryPerArea": { + "SE3": 1820.5, + "SE4": 2449.96 + } + }, + { + "deliveryStart": 
"2024-11-06T07:00:00Z", + "deliveryEnd": "2024-11-06T08:00:00Z", + "entryPerArea": { + "SE3": 1723.0, + "SE4": 2244.22 + } + }, + { + "deliveryStart": "2024-11-06T08:00:00Z", + "deliveryEnd": "2024-11-06T09:00:00Z", + "entryPerArea": { + "SE3": 1298.57, + "SE4": 1643.45 + } + }, + { + "deliveryStart": "2024-11-06T09:00:00Z", + "deliveryEnd": "2024-11-06T10:00:00Z", + "entryPerArea": { + "SE3": 1099.25, + "SE4": 1507.23 + } + }, + { + "deliveryStart": "2024-11-06T10:00:00Z", + "deliveryEnd": "2024-11-06T11:00:00Z", + "entryPerArea": { + "SE3": 903.31, + "SE4": 1362.84 + } + }, + { + "deliveryStart": "2024-11-06T11:00:00Z", + "deliveryEnd": "2024-11-06T12:00:00Z", + "entryPerArea": { + "SE3": 959.99, + "SE4": 1376.13 + } + }, + { + "deliveryStart": "2024-11-06T12:00:00Z", + "deliveryEnd": "2024-11-06T13:00:00Z", + "entryPerArea": { + "SE3": 1186.61, + "SE4": 1449.96 + } + }, + { + "deliveryStart": "2024-11-06T13:00:00Z", + "deliveryEnd": "2024-11-06T14:00:00Z", + "entryPerArea": { + "SE3": 1307.67, + "SE4": 1608.35 + } + }, + { + "deliveryStart": "2024-11-06T14:00:00Z", + "deliveryEnd": "2024-11-06T15:00:00Z", + "entryPerArea": { + "SE3": 1385.46, + "SE4": 2110.8 + } + }, + { + "deliveryStart": "2024-11-06T15:00:00Z", + "deliveryEnd": "2024-11-06T16:00:00Z", + "entryPerArea": { + "SE3": 1366.8, + "SE4": 3031.25 + } + }, + { + "deliveryStart": "2024-11-06T16:00:00Z", + "deliveryEnd": "2024-11-06T17:00:00Z", + "entryPerArea": { + "SE3": 2366.57, + "SE4": 5511.77 + } + }, + { + "deliveryStart": "2024-11-06T17:00:00Z", + "deliveryEnd": "2024-11-06T18:00:00Z", + "entryPerArea": { + "SE3": 1481.92, + "SE4": 3351.64 + } + }, + { + "deliveryStart": "2024-11-06T18:00:00Z", + "deliveryEnd": "2024-11-06T19:00:00Z", + "entryPerArea": { + "SE3": 1082.69, + "SE4": 2484.95 + } + }, + { + "deliveryStart": "2024-11-06T19:00:00Z", + "deliveryEnd": "2024-11-06T20:00:00Z", + "entryPerArea": { + "SE3": 716.82, + "SE4": 1624.33 + } + }, + { + "deliveryStart": "2024-11-06T20:00:00Z", + 
"deliveryEnd": "2024-11-06T21:00:00Z", + "entryPerArea": { + "SE3": 583.16, + "SE4": 1306.27 + } + }, + { + "deliveryStart": "2024-11-06T21:00:00Z", + "deliveryEnd": "2024-11-06T22:00:00Z", + "entryPerArea": { + "SE3": 523.09, + "SE4": 1142.99 + } + }, + { + "deliveryStart": "2024-11-06T22:00:00Z", + "deliveryEnd": "2024-11-06T23:00:00Z", + "entryPerArea": { + "SE3": 250.64, + "SE4": 539.42 + } + } + ], + "blockPriceAggregates": [ + { + "blockName": "Off-peak 1", + "deliveryStart": "2024-11-05T23:00:00Z", + "deliveryEnd": "2024-11-06T07:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 422.51, + "min": 74.06, + "max": 1820.5 + }, + "SE4": { + "average": 706.61, + "min": 157.34, + "max": 2449.96 + } + } + }, + { + "blockName": "Peak", + "deliveryStart": "2024-11-06T07:00:00Z", + "deliveryEnd": "2024-11-06T19:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 1346.82, + "min": 903.31, + "max": 2366.57 + }, + "SE4": { + "average": 2306.88, + "min": 1362.84, + "max": 5511.77 + } + } + }, + { + "blockName": "Off-peak 2", + "deliveryStart": "2024-11-06T19:00:00Z", + "deliveryEnd": "2024-11-06T23:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 518.43, + "min": 250.64, + "max": 716.82 + }, + "SE4": { + "average": 1153.25, + "min": 539.42, + "max": 1624.33 + } + } + } + ], + "currency": "SEK", + "exchangeRate": 11.66314, + "areaStates": [ + { + "state": "Final", + "areas": ["SE3", "SE4"] + } + ], + "areaAverages": [ + { + "areaCode": "SE3", + "price": 900.65 + }, + { + "areaCode": "SE4", + "price": 1581.19 + } + ] +} diff --git a/tests/components/nordpool/fixtures/delivery_period_yesterday.json b/tests/components/nordpool/fixtures/delivery_period_yesterday.json new file mode 100644 index 00000000000..bc79aeb99f0 --- /dev/null +++ b/tests/components/nordpool/fixtures/delivery_period_yesterday.json @@ -0,0 +1,272 @@ +{ + "deliveryDateCET": "2024-11-04", + "version": 3, + "updatedAt": "2024-11-04T08:09:11.1931991Z", + "deliveryAreas": ["SE3", 
"SE4"], + "market": "DayAhead", + "multiAreaEntries": [ + { + "deliveryStart": "2024-11-03T23:00:00Z", + "deliveryEnd": "2024-11-04T00:00:00Z", + "entryPerArea": { + "SE3": 66.13, + "SE4": 78.59 + } + }, + { + "deliveryStart": "2024-11-04T00:00:00Z", + "deliveryEnd": "2024-11-04T01:00:00Z", + "entryPerArea": { + "SE3": 72.54, + "SE4": 86.51 + } + }, + { + "deliveryStart": "2024-11-04T01:00:00Z", + "deliveryEnd": "2024-11-04T02:00:00Z", + "entryPerArea": { + "SE3": 73.12, + "SE4": 84.88 + } + }, + { + "deliveryStart": "2024-11-04T02:00:00Z", + "deliveryEnd": "2024-11-04T03:00:00Z", + "entryPerArea": { + "SE3": 171.97, + "SE4": 217.26 + } + }, + { + "deliveryStart": "2024-11-04T03:00:00Z", + "deliveryEnd": "2024-11-04T04:00:00Z", + "entryPerArea": { + "SE3": 181.05, + "SE4": 227.74 + } + }, + { + "deliveryStart": "2024-11-04T04:00:00Z", + "deliveryEnd": "2024-11-04T05:00:00Z", + "entryPerArea": { + "SE3": 360.71, + "SE4": 414.61 + } + }, + { + "deliveryStart": "2024-11-04T05:00:00Z", + "deliveryEnd": "2024-11-04T06:00:00Z", + "entryPerArea": { + "SE3": 917.83, + "SE4": 1439.33 + } + }, + { + "deliveryStart": "2024-11-04T06:00:00Z", + "deliveryEnd": "2024-11-04T07:00:00Z", + "entryPerArea": { + "SE3": 1426.17, + "SE4": 1695.95 + } + }, + { + "deliveryStart": "2024-11-04T07:00:00Z", + "deliveryEnd": "2024-11-04T08:00:00Z", + "entryPerArea": { + "SE3": 1350.96, + "SE4": 1605.13 + } + }, + { + "deliveryStart": "2024-11-04T08:00:00Z", + "deliveryEnd": "2024-11-04T09:00:00Z", + "entryPerArea": { + "SE3": 1195.06, + "SE4": 1393.46 + } + }, + { + "deliveryStart": "2024-11-04T09:00:00Z", + "deliveryEnd": "2024-11-04T10:00:00Z", + "entryPerArea": { + "SE3": 992.35, + "SE4": 1126.71 + } + }, + { + "deliveryStart": "2024-11-04T10:00:00Z", + "deliveryEnd": "2024-11-04T11:00:00Z", + "entryPerArea": { + "SE3": 976.63, + "SE4": 1107.97 + } + }, + { + "deliveryStart": "2024-11-04T11:00:00Z", + "deliveryEnd": "2024-11-04T12:00:00Z", + "entryPerArea": { + "SE3": 952.76, + "SE4": 
1085.73 + } + }, + { + "deliveryStart": "2024-11-04T12:00:00Z", + "deliveryEnd": "2024-11-04T13:00:00Z", + "entryPerArea": { + "SE3": 1029.37, + "SE4": 1177.71 + } + }, + { + "deliveryStart": "2024-11-04T13:00:00Z", + "deliveryEnd": "2024-11-04T14:00:00Z", + "entryPerArea": { + "SE3": 1043.35, + "SE4": 1194.59 + } + }, + { + "deliveryStart": "2024-11-04T14:00:00Z", + "deliveryEnd": "2024-11-04T15:00:00Z", + "entryPerArea": { + "SE3": 1359.57, + "SE4": 1561.12 + } + }, + { + "deliveryStart": "2024-11-04T15:00:00Z", + "deliveryEnd": "2024-11-04T16:00:00Z", + "entryPerArea": { + "SE3": 1848.35, + "SE4": 2145.84 + } + }, + { + "deliveryStart": "2024-11-04T16:00:00Z", + "deliveryEnd": "2024-11-04T17:00:00Z", + "entryPerArea": { + "SE3": 2812.53, + "SE4": 3313.53 + } + }, + { + "deliveryStart": "2024-11-04T17:00:00Z", + "deliveryEnd": "2024-11-04T18:00:00Z", + "entryPerArea": { + "SE3": 2351.69, + "SE4": 2751.87 + } + }, + { + "deliveryStart": "2024-11-04T18:00:00Z", + "deliveryEnd": "2024-11-04T19:00:00Z", + "entryPerArea": { + "SE3": 1553.08, + "SE4": 1842.77 + } + }, + { + "deliveryStart": "2024-11-04T19:00:00Z", + "deliveryEnd": "2024-11-04T20:00:00Z", + "entryPerArea": { + "SE3": 1165.02, + "SE4": 1398.35 + } + }, + { + "deliveryStart": "2024-11-04T20:00:00Z", + "deliveryEnd": "2024-11-04T21:00:00Z", + "entryPerArea": { + "SE3": 1007.48, + "SE4": 1172.35 + } + }, + { + "deliveryStart": "2024-11-04T21:00:00Z", + "deliveryEnd": "2024-11-04T22:00:00Z", + "entryPerArea": { + "SE3": 792.09, + "SE4": 920.28 + } + }, + { + "deliveryStart": "2024-11-04T22:00:00Z", + "deliveryEnd": "2024-11-04T23:00:00Z", + "entryPerArea": { + "SE3": 465.38, + "SE4": 528.83 + } + } + ], + "blockPriceAggregates": [ + { + "blockName": "Off-peak 1", + "deliveryStart": "2024-11-03T23:00:00Z", + "deliveryEnd": "2024-11-04T07:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 408.69, + "min": 66.13, + "max": 1426.17 + }, + "SE4": { + "average": 530.61, + "min": 78.59, + "max": 1695.95 + } 
+ } + }, + { + "blockName": "Peak", + "deliveryStart": "2024-11-04T07:00:00Z", + "deliveryEnd": "2024-11-04T19:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 1455.48, + "min": 952.76, + "max": 2812.53 + }, + "SE4": { + "average": 1692.2, + "min": 1085.73, + "max": 3313.53 + } + } + }, + { + "blockName": "Off-peak 2", + "deliveryStart": "2024-11-04T19:00:00Z", + "deliveryEnd": "2024-11-04T23:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 857.49, + "min": 465.38, + "max": 1165.02 + }, + "SE4": { + "average": 1004.95, + "min": 528.83, + "max": 1398.35 + } + } + } + ], + "currency": "SEK", + "exchangeRate": 11.64318, + "areaStates": [ + { + "state": "Final", + "areas": ["SE3", "SE4"] + } + ], + "areaAverages": [ + { + "areaCode": "SE3", + "price": 1006.88 + }, + { + "areaCode": "SE4", + "price": 1190.46 + } + ] +} diff --git a/tests/components/nordpool/snapshots/test_diagnostics.ambr b/tests/components/nordpool/snapshots/test_diagnostics.ambr index dde2eca0022..76a3dd96405 100644 --- a/tests/components/nordpool/snapshots/test_diagnostics.ambr +++ b/tests/components/nordpool/snapshots/test_diagnostics.ambr @@ -2,282 +2,840 @@ # name: test_diagnostics dict({ 'raw': dict({ - 'areaAverages': list([ - dict({ - 'areaCode': 'SE3', - 'price': 900.74, - }), - dict({ - 'areaCode': 'SE4', - 'price': 1166.12, - }), - ]), - 'areaStates': list([ - dict({ - 'areas': list([ - 'SE3', - 'SE4', - ]), - 'state': 'Final', - }), - ]), - 'blockPriceAggregates': list([ - dict({ - 'averagePricePerArea': dict({ - 'SE3': dict({ - 'average': 422.87, - 'max': 1406.14, - 'min': 61.69, + '2024-11-04': dict({ + 'areaAverages': list([ + dict({ + 'areaCode': 'SE3', + 'price': 1006.88, + }), + dict({ + 'areaCode': 'SE4', + 'price': 1190.46, + }), + ]), + 'areaStates': list([ + dict({ + 'areas': list([ + 'SE3', + 'SE4', + ]), + 'state': 'Final', + }), + ]), + 'blockPriceAggregates': list([ + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 408.69, + 'max': 
1426.17, + 'min': 66.13, + }), + 'SE4': dict({ + 'average': 530.61, + 'max': 1695.95, + 'min': 78.59, + }), }), - 'SE4': dict({ - 'average': 497.97, - 'max': 1648.25, - 'min': 65.19, + 'blockName': 'Off-peak 1', + 'deliveryEnd': '2024-11-04T07:00:00Z', + 'deliveryStart': '2024-11-03T23:00:00Z', + }), + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 1455.48, + 'max': 2812.53, + 'min': 952.76, + }), + 'SE4': dict({ + 'average': 1692.2, + 'max': 3313.53, + 'min': 1085.73, + }), + }), + 'blockName': 'Peak', + 'deliveryEnd': '2024-11-04T19:00:00Z', + 'deliveryStart': '2024-11-04T07:00:00Z', + }), + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 857.49, + 'max': 1165.02, + 'min': 465.38, + }), + 'SE4': dict({ + 'average': 1004.95, + 'max': 1398.35, + 'min': 528.83, + }), + }), + 'blockName': 'Off-peak 2', + 'deliveryEnd': '2024-11-04T23:00:00Z', + 'deliveryStart': '2024-11-04T19:00:00Z', + }), + ]), + 'currency': 'SEK', + 'deliveryAreas': list([ + 'SE3', + 'SE4', + ]), + 'deliveryDateCET': '2024-11-04', + 'exchangeRate': 11.64318, + 'market': 'DayAhead', + 'multiAreaEntries': list([ + dict({ + 'deliveryEnd': '2024-11-04T00:00:00Z', + 'deliveryStart': '2024-11-03T23:00:00Z', + 'entryPerArea': dict({ + 'SE3': 66.13, + 'SE4': 78.59, }), }), - 'blockName': 'Off-peak 1', - 'deliveryEnd': '2024-11-05T07:00:00Z', - 'deliveryStart': '2024-11-04T23:00:00Z', - }), - dict({ - 'averagePricePerArea': dict({ - 'SE3': dict({ - 'average': 1315.97, - 'max': 2512.65, - 'min': 925.05, - }), - 'SE4': dict({ - 'average': 1735.59, - 'max': 3533.03, - 'min': 1081.72, + dict({ + 'deliveryEnd': '2024-11-04T01:00:00Z', + 'deliveryStart': '2024-11-04T00:00:00Z', + 'entryPerArea': dict({ + 'SE3': 72.54, + 'SE4': 86.51, }), }), - 'blockName': 'Peak', - 'deliveryEnd': '2024-11-05T19:00:00Z', - 'deliveryStart': '2024-11-05T07:00:00Z', - }), - dict({ - 'averagePricePerArea': dict({ - 'SE3': dict({ - 'average': 610.79, - 'max': 835.53, - 'min': 289.14, - }), - 
'SE4': dict({ - 'average': 793.98, - 'max': 1112.57, - 'min': 349.21, + dict({ + 'deliveryEnd': '2024-11-04T02:00:00Z', + 'deliveryStart': '2024-11-04T01:00:00Z', + 'entryPerArea': dict({ + 'SE3': 73.12, + 'SE4': 84.88, }), }), - 'blockName': 'Off-peak 2', - 'deliveryEnd': '2024-11-05T23:00:00Z', - 'deliveryStart': '2024-11-05T19:00:00Z', - }), - ]), - 'currency': 'SEK', - 'deliveryAreas': list([ - 'SE3', - 'SE4', - ]), - 'deliveryDateCET': '2024-11-05', - 'exchangeRate': 11.6402, - 'market': 'DayAhead', - 'multiAreaEntries': list([ - dict({ - 'deliveryEnd': '2024-11-05T00:00:00Z', - 'deliveryStart': '2024-11-04T23:00:00Z', - 'entryPerArea': dict({ - 'SE3': 250.73, - 'SE4': 283.79, + dict({ + 'deliveryEnd': '2024-11-04T03:00:00Z', + 'deliveryStart': '2024-11-04T02:00:00Z', + 'entryPerArea': dict({ + 'SE3': 171.97, + 'SE4': 217.26, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T01:00:00Z', - 'deliveryStart': '2024-11-05T00:00:00Z', - 'entryPerArea': dict({ - 'SE3': 76.36, - 'SE4': 81.36, + dict({ + 'deliveryEnd': '2024-11-04T04:00:00Z', + 'deliveryStart': '2024-11-04T03:00:00Z', + 'entryPerArea': dict({ + 'SE3': 181.05, + 'SE4': 227.74, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T02:00:00Z', - 'deliveryStart': '2024-11-05T01:00:00Z', - 'entryPerArea': dict({ - 'SE3': 73.92, - 'SE4': 79.15, + dict({ + 'deliveryEnd': '2024-11-04T05:00:00Z', + 'deliveryStart': '2024-11-04T04:00:00Z', + 'entryPerArea': dict({ + 'SE3': 360.71, + 'SE4': 414.61, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T03:00:00Z', - 'deliveryStart': '2024-11-05T02:00:00Z', - 'entryPerArea': dict({ - 'SE3': 61.69, - 'SE4': 65.19, + dict({ + 'deliveryEnd': '2024-11-04T06:00:00Z', + 'deliveryStart': '2024-11-04T05:00:00Z', + 'entryPerArea': dict({ + 'SE3': 917.83, + 'SE4': 1439.33, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T04:00:00Z', - 'deliveryStart': '2024-11-05T03:00:00Z', - 'entryPerArea': dict({ - 'SE3': 64.6, - 'SE4': 68.44, + dict({ + 'deliveryEnd': 
'2024-11-04T07:00:00Z', + 'deliveryStart': '2024-11-04T06:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1426.17, + 'SE4': 1695.95, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T05:00:00Z', - 'deliveryStart': '2024-11-05T04:00:00Z', - 'entryPerArea': dict({ - 'SE3': 453.27, - 'SE4': 516.71, + dict({ + 'deliveryEnd': '2024-11-04T08:00:00Z', + 'deliveryStart': '2024-11-04T07:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1350.96, + 'SE4': 1605.13, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T06:00:00Z', - 'deliveryStart': '2024-11-05T05:00:00Z', - 'entryPerArea': dict({ - 'SE3': 996.28, - 'SE4': 1240.85, + dict({ + 'deliveryEnd': '2024-11-04T09:00:00Z', + 'deliveryStart': '2024-11-04T08:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1195.06, + 'SE4': 1393.46, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T07:00:00Z', - 'deliveryStart': '2024-11-05T06:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1406.14, - 'SE4': 1648.25, + dict({ + 'deliveryEnd': '2024-11-04T10:00:00Z', + 'deliveryStart': '2024-11-04T09:00:00Z', + 'entryPerArea': dict({ + 'SE3': 992.35, + 'SE4': 1126.71, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T08:00:00Z', - 'deliveryStart': '2024-11-05T07:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1346.54, - 'SE4': 1570.5, + dict({ + 'deliveryEnd': '2024-11-04T11:00:00Z', + 'deliveryStart': '2024-11-04T10:00:00Z', + 'entryPerArea': dict({ + 'SE3': 976.63, + 'SE4': 1107.97, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T09:00:00Z', - 'deliveryStart': '2024-11-05T08:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1150.28, - 'SE4': 1345.37, + dict({ + 'deliveryEnd': '2024-11-04T12:00:00Z', + 'deliveryStart': '2024-11-04T11:00:00Z', + 'entryPerArea': dict({ + 'SE3': 952.76, + 'SE4': 1085.73, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T10:00:00Z', - 'deliveryStart': '2024-11-05T09:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1031.32, - 'SE4': 1206.51, + dict({ + 'deliveryEnd': '2024-11-04T13:00:00Z', + 'deliveryStart': 
'2024-11-04T12:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1029.37, + 'SE4': 1177.71, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T11:00:00Z', - 'deliveryStart': '2024-11-05T10:00:00Z', - 'entryPerArea': dict({ - 'SE3': 927.37, - 'SE4': 1085.8, + dict({ + 'deliveryEnd': '2024-11-04T14:00:00Z', + 'deliveryStart': '2024-11-04T13:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1043.35, + 'SE4': 1194.59, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T12:00:00Z', - 'deliveryStart': '2024-11-05T11:00:00Z', - 'entryPerArea': dict({ - 'SE3': 925.05, - 'SE4': 1081.72, + dict({ + 'deliveryEnd': '2024-11-04T15:00:00Z', + 'deliveryStart': '2024-11-04T14:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1359.57, + 'SE4': 1561.12, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T13:00:00Z', - 'deliveryStart': '2024-11-05T12:00:00Z', - 'entryPerArea': dict({ - 'SE3': 949.49, - 'SE4': 1130.38, + dict({ + 'deliveryEnd': '2024-11-04T16:00:00Z', + 'deliveryStart': '2024-11-04T15:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1848.35, + 'SE4': 2145.84, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T14:00:00Z', - 'deliveryStart': '2024-11-05T13:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1042.03, - 'SE4': 1256.91, + dict({ + 'deliveryEnd': '2024-11-04T17:00:00Z', + 'deliveryStart': '2024-11-04T16:00:00Z', + 'entryPerArea': dict({ + 'SE3': 2812.53, + 'SE4': 3313.53, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T15:00:00Z', - 'deliveryStart': '2024-11-05T14:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1258.89, - 'SE4': 1765.82, + dict({ + 'deliveryEnd': '2024-11-04T18:00:00Z', + 'deliveryStart': '2024-11-04T17:00:00Z', + 'entryPerArea': dict({ + 'SE3': 2351.69, + 'SE4': 2751.87, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T16:00:00Z', - 'deliveryStart': '2024-11-05T15:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1816.45, - 'SE4': 2522.55, + dict({ + 'deliveryEnd': '2024-11-04T19:00:00Z', + 'deliveryStart': '2024-11-04T18:00:00Z', + 'entryPerArea': dict({ + 'SE3': 
1553.08, + 'SE4': 1842.77, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T17:00:00Z', - 'deliveryStart': '2024-11-05T16:00:00Z', - 'entryPerArea': dict({ - 'SE3': 2512.65, - 'SE4': 3533.03, + dict({ + 'deliveryEnd': '2024-11-04T20:00:00Z', + 'deliveryStart': '2024-11-04T19:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1165.02, + 'SE4': 1398.35, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T18:00:00Z', - 'deliveryStart': '2024-11-05T17:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1819.83, - 'SE4': 2524.06, + dict({ + 'deliveryEnd': '2024-11-04T21:00:00Z', + 'deliveryStart': '2024-11-04T20:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1007.48, + 'SE4': 1172.35, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T19:00:00Z', - 'deliveryStart': '2024-11-05T18:00:00Z', - 'entryPerArea': dict({ - 'SE3': 1011.77, - 'SE4': 1804.46, + dict({ + 'deliveryEnd': '2024-11-04T22:00:00Z', + 'deliveryStart': '2024-11-04T21:00:00Z', + 'entryPerArea': dict({ + 'SE3': 792.09, + 'SE4': 920.28, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T20:00:00Z', - 'deliveryStart': '2024-11-05T19:00:00Z', - 'entryPerArea': dict({ - 'SE3': 835.53, - 'SE4': 1112.57, + dict({ + 'deliveryEnd': '2024-11-04T23:00:00Z', + 'deliveryStart': '2024-11-04T22:00:00Z', + 'entryPerArea': dict({ + 'SE3': 465.38, + 'SE4': 528.83, + }), }), - }), - dict({ - 'deliveryEnd': '2024-11-05T21:00:00Z', - 'deliveryStart': '2024-11-05T20:00:00Z', - 'entryPerArea': dict({ - 'SE3': 796.19, - 'SE4': 1051.69, + ]), + 'updatedAt': '2024-11-04T08:09:11.1931991Z', + 'version': 3, + }), + '2024-11-05': dict({ + 'areaAverages': list([ + dict({ + 'areaCode': 'SE3', + 'price': 900.74, }), - }), - dict({ - 'deliveryEnd': '2024-11-05T22:00:00Z', - 'deliveryStart': '2024-11-05T21:00:00Z', - 'entryPerArea': dict({ - 'SE3': 522.3, - 'SE4': 662.44, + dict({ + 'areaCode': 'SE4', + 'price': 1166.12, }), - }), - dict({ - 'deliveryEnd': '2024-11-05T23:00:00Z', - 'deliveryStart': '2024-11-05T22:00:00Z', - 'entryPerArea': dict({ - 
'SE3': 289.14, - 'SE4': 349.21, + ]), + 'areaStates': list([ + dict({ + 'areas': list([ + 'SE3', + 'SE4', + ]), + 'state': 'Final', }), - }), - ]), - 'updatedAt': '2024-11-04T12:15:03.9456464Z', - 'version': 3, + ]), + 'blockPriceAggregates': list([ + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 422.87, + 'max': 1406.14, + 'min': 61.69, + }), + 'SE4': dict({ + 'average': 497.97, + 'max': 1648.25, + 'min': 65.19, + }), + }), + 'blockName': 'Off-peak 1', + 'deliveryEnd': '2024-11-05T07:00:00Z', + 'deliveryStart': '2024-11-04T23:00:00Z', + }), + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 1315.97, + 'max': 2512.65, + 'min': 925.05, + }), + 'SE4': dict({ + 'average': 1735.59, + 'max': 3533.03, + 'min': 1081.72, + }), + }), + 'blockName': 'Peak', + 'deliveryEnd': '2024-11-05T19:00:00Z', + 'deliveryStart': '2024-11-05T07:00:00Z', + }), + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 610.79, + 'max': 835.53, + 'min': 289.14, + }), + 'SE4': dict({ + 'average': 793.98, + 'max': 1112.57, + 'min': 349.21, + }), + }), + 'blockName': 'Off-peak 2', + 'deliveryEnd': '2024-11-05T23:00:00Z', + 'deliveryStart': '2024-11-05T19:00:00Z', + }), + ]), + 'currency': 'SEK', + 'deliveryAreas': list([ + 'SE3', + 'SE4', + ]), + 'deliveryDateCET': '2024-11-05', + 'exchangeRate': 11.6402, + 'market': 'DayAhead', + 'multiAreaEntries': list([ + dict({ + 'deliveryEnd': '2024-11-05T00:00:00Z', + 'deliveryStart': '2024-11-04T23:00:00Z', + 'entryPerArea': dict({ + 'SE3': 250.73, + 'SE4': 283.79, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T01:00:00Z', + 'deliveryStart': '2024-11-05T00:00:00Z', + 'entryPerArea': dict({ + 'SE3': 76.36, + 'SE4': 81.36, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T02:00:00Z', + 'deliveryStart': '2024-11-05T01:00:00Z', + 'entryPerArea': dict({ + 'SE3': 73.92, + 'SE4': 79.15, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T03:00:00Z', + 'deliveryStart': '2024-11-05T02:00:00Z', + 
'entryPerArea': dict({ + 'SE3': 61.69, + 'SE4': 65.19, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T04:00:00Z', + 'deliveryStart': '2024-11-05T03:00:00Z', + 'entryPerArea': dict({ + 'SE3': 64.6, + 'SE4': 68.44, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T05:00:00Z', + 'deliveryStart': '2024-11-05T04:00:00Z', + 'entryPerArea': dict({ + 'SE3': 453.27, + 'SE4': 516.71, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T06:00:00Z', + 'deliveryStart': '2024-11-05T05:00:00Z', + 'entryPerArea': dict({ + 'SE3': 996.28, + 'SE4': 1240.85, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T07:00:00Z', + 'deliveryStart': '2024-11-05T06:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1406.14, + 'SE4': 1648.25, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T08:00:00Z', + 'deliveryStart': '2024-11-05T07:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1346.54, + 'SE4': 1570.5, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T09:00:00Z', + 'deliveryStart': '2024-11-05T08:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1150.28, + 'SE4': 1345.37, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T10:00:00Z', + 'deliveryStart': '2024-11-05T09:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1031.32, + 'SE4': 1206.51, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T11:00:00Z', + 'deliveryStart': '2024-11-05T10:00:00Z', + 'entryPerArea': dict({ + 'SE3': 927.37, + 'SE4': 1085.8, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T12:00:00Z', + 'deliveryStart': '2024-11-05T11:00:00Z', + 'entryPerArea': dict({ + 'SE3': 925.05, + 'SE4': 1081.72, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T13:00:00Z', + 'deliveryStart': '2024-11-05T12:00:00Z', + 'entryPerArea': dict({ + 'SE3': 949.49, + 'SE4': 1130.38, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T14:00:00Z', + 'deliveryStart': '2024-11-05T13:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1042.03, + 'SE4': 1256.91, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T15:00:00Z', + 'deliveryStart': '2024-11-05T14:00:00Z', + 'entryPerArea': dict({ + 
'SE3': 1258.89, + 'SE4': 1765.82, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T16:00:00Z', + 'deliveryStart': '2024-11-05T15:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1816.45, + 'SE4': 2522.55, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T17:00:00Z', + 'deliveryStart': '2024-11-05T16:00:00Z', + 'entryPerArea': dict({ + 'SE3': 2512.65, + 'SE4': 3533.03, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T18:00:00Z', + 'deliveryStart': '2024-11-05T17:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1819.83, + 'SE4': 2524.06, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T19:00:00Z', + 'deliveryStart': '2024-11-05T18:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1011.77, + 'SE4': 1804.46, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T20:00:00Z', + 'deliveryStart': '2024-11-05T19:00:00Z', + 'entryPerArea': dict({ + 'SE3': 835.53, + 'SE4': 1112.57, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T21:00:00Z', + 'deliveryStart': '2024-11-05T20:00:00Z', + 'entryPerArea': dict({ + 'SE3': 796.19, + 'SE4': 1051.69, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T22:00:00Z', + 'deliveryStart': '2024-11-05T21:00:00Z', + 'entryPerArea': dict({ + 'SE3': 522.3, + 'SE4': 662.44, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T23:00:00Z', + 'deliveryStart': '2024-11-05T22:00:00Z', + 'entryPerArea': dict({ + 'SE3': 289.14, + 'SE4': 349.21, + }), + }), + ]), + 'updatedAt': '2024-11-04T12:15:03.9456464Z', + 'version': 3, + }), + '2024-11-06': dict({ + 'areaAverages': list([ + dict({ + 'areaCode': 'SE3', + 'price': 900.65, + }), + dict({ + 'areaCode': 'SE4', + 'price': 1581.19, + }), + ]), + 'areaStates': list([ + dict({ + 'areas': list([ + 'SE3', + 'SE4', + ]), + 'state': 'Final', + }), + ]), + 'blockPriceAggregates': list([ + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 422.51, + 'max': 1820.5, + 'min': 74.06, + }), + 'SE4': dict({ + 'average': 706.61, + 'max': 2449.96, + 'min': 157.34, + }), + }), + 'blockName': 'Off-peak 1', + 'deliveryEnd': 
'2024-11-06T07:00:00Z', + 'deliveryStart': '2024-11-05T23:00:00Z', + }), + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 1346.82, + 'max': 2366.57, + 'min': 903.31, + }), + 'SE4': dict({ + 'average': 2306.88, + 'max': 5511.77, + 'min': 1362.84, + }), + }), + 'blockName': 'Peak', + 'deliveryEnd': '2024-11-06T19:00:00Z', + 'deliveryStart': '2024-11-06T07:00:00Z', + }), + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 518.43, + 'max': 716.82, + 'min': 250.64, + }), + 'SE4': dict({ + 'average': 1153.25, + 'max': 1624.33, + 'min': 539.42, + }), + }), + 'blockName': 'Off-peak 2', + 'deliveryEnd': '2024-11-06T23:00:00Z', + 'deliveryStart': '2024-11-06T19:00:00Z', + }), + ]), + 'currency': 'SEK', + 'deliveryAreas': list([ + 'SE3', + 'SE4', + ]), + 'deliveryDateCET': '2024-11-06', + 'exchangeRate': 11.66314, + 'market': 'DayAhead', + 'multiAreaEntries': list([ + dict({ + 'deliveryEnd': '2024-11-06T00:00:00Z', + 'deliveryStart': '2024-11-05T23:00:00Z', + 'entryPerArea': dict({ + 'SE3': 126.66, + 'SE4': 275.6, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T01:00:00Z', + 'deliveryStart': '2024-11-06T00:00:00Z', + 'entryPerArea': dict({ + 'SE3': 74.06, + 'SE4': 157.34, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T02:00:00Z', + 'deliveryStart': '2024-11-06T01:00:00Z', + 'entryPerArea': dict({ + 'SE3': 78.38, + 'SE4': 165.62, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T03:00:00Z', + 'deliveryStart': '2024-11-06T02:00:00Z', + 'entryPerArea': dict({ + 'SE3': 92.37, + 'SE4': 196.17, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T04:00:00Z', + 'deliveryStart': '2024-11-06T03:00:00Z', + 'entryPerArea': dict({ + 'SE3': 99.14, + 'SE4': 190.58, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T05:00:00Z', + 'deliveryStart': '2024-11-06T04:00:00Z', + 'entryPerArea': dict({ + 'SE3': 447.51, + 'SE4': 932.93, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T06:00:00Z', + 'deliveryStart': '2024-11-06T05:00:00Z', + 'entryPerArea': 
dict({ + 'SE3': 641.47, + 'SE4': 1284.69, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T07:00:00Z', + 'deliveryStart': '2024-11-06T06:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1820.5, + 'SE4': 2449.96, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T08:00:00Z', + 'deliveryStart': '2024-11-06T07:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1723.0, + 'SE4': 2244.22, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T09:00:00Z', + 'deliveryStart': '2024-11-06T08:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1298.57, + 'SE4': 1643.45, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T10:00:00Z', + 'deliveryStart': '2024-11-06T09:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1099.25, + 'SE4': 1507.23, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T11:00:00Z', + 'deliveryStart': '2024-11-06T10:00:00Z', + 'entryPerArea': dict({ + 'SE3': 903.31, + 'SE4': 1362.84, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T12:00:00Z', + 'deliveryStart': '2024-11-06T11:00:00Z', + 'entryPerArea': dict({ + 'SE3': 959.99, + 'SE4': 1376.13, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T13:00:00Z', + 'deliveryStart': '2024-11-06T12:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1186.61, + 'SE4': 1449.96, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T14:00:00Z', + 'deliveryStart': '2024-11-06T13:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1307.67, + 'SE4': 1608.35, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T15:00:00Z', + 'deliveryStart': '2024-11-06T14:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1385.46, + 'SE4': 2110.8, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T16:00:00Z', + 'deliveryStart': '2024-11-06T15:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1366.8, + 'SE4': 3031.25, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T17:00:00Z', + 'deliveryStart': '2024-11-06T16:00:00Z', + 'entryPerArea': dict({ + 'SE3': 2366.57, + 'SE4': 5511.77, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T18:00:00Z', + 'deliveryStart': '2024-11-06T17:00:00Z', + 'entryPerArea': dict({ + 
'SE3': 1481.92, + 'SE4': 3351.64, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T19:00:00Z', + 'deliveryStart': '2024-11-06T18:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1082.69, + 'SE4': 2484.95, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T20:00:00Z', + 'deliveryStart': '2024-11-06T19:00:00Z', + 'entryPerArea': dict({ + 'SE3': 716.82, + 'SE4': 1624.33, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T21:00:00Z', + 'deliveryStart': '2024-11-06T20:00:00Z', + 'entryPerArea': dict({ + 'SE3': 583.16, + 'SE4': 1306.27, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T22:00:00Z', + 'deliveryStart': '2024-11-06T21:00:00Z', + 'entryPerArea': dict({ + 'SE3': 523.09, + 'SE4': 1142.99, + }), + }), + dict({ + 'deliveryEnd': '2024-11-06T23:00:00Z', + 'deliveryStart': '2024-11-06T22:00:00Z', + 'entryPerArea': dict({ + 'SE3': 250.64, + 'SE4': 539.42, + }), + }), + ]), + 'updatedAt': '2024-11-05T12:12:51.9853434Z', + 'version': 3, + }), }), }) # --- diff --git a/tests/components/nordpool/test_config_flow.py b/tests/components/nordpool/test_config_flow.py index cfdfc63aca7..1f0e99b65ff 100644 --- a/tests/components/nordpool/test_config_flow.py +++ b/tests/components/nordpool/test_config_flow.py @@ -2,10 +2,11 @@ from __future__ import annotations +from typing import Any from unittest.mock import patch from pynordpool import ( - DeliveryPeriodData, + NordPoolClient, NordPoolConnectionError, NordPoolEmptyResponseError, NordPoolError, @@ -22,10 +23,11 @@ from homeassistant.data_entry_flow import FlowResultType from . 
import ENTRY_CONFIG from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker @pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") -async def test_form(hass: HomeAssistant, get_data: DeliveryPeriodData) -> None: +async def test_form(hass: HomeAssistant, get_client: NordPoolClient) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -34,17 +36,11 @@ async def test_form(hass: HomeAssistant, get_data: DeliveryPeriodData) -> None: assert result["step_id"] == "user" assert result["type"] is FlowResultType.FORM - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - ENTRY_CONFIG, - ) - await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + ENTRY_CONFIG, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["version"] == 1 @@ -54,7 +50,7 @@ async def test_form(hass: HomeAssistant, get_data: DeliveryPeriodData) -> None: @pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") async def test_single_config_entry( - hass: HomeAssistant, load_int: None, get_data: DeliveryPeriodData + hass: HomeAssistant, load_int: None, get_client: NordPoolClient ) -> None: """Test abort for single config entry.""" @@ -77,7 +73,7 @@ async def test_single_config_entry( ) async def test_cannot_connect( hass: HomeAssistant, - get_data: DeliveryPeriodData, + get_client: NordPoolClient, error_message: Exception, p_error: str, ) -> None: @@ -101,14 +97,10 @@ async def test_cannot_connect( assert result["errors"] == {"base": p_error} - with patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ): - result = await hass.config_entries.flow.async_configure( - 
result["flow_id"], - user_input=ENTRY_CONFIG, - ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=ENTRY_CONFIG, + ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Nord Pool" @@ -119,25 +111,18 @@ async def test_cannot_connect( async def test_reconfigure( hass: HomeAssistant, load_int: MockConfigEntry, - get_data: DeliveryPeriodData, ) -> None: """Test reconfiguration.""" result = await load_int.start_reconfigure_flow(hass) - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_AREAS: ["SE3"], - CONF_CURRENCY: "EUR", - }, - ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_AREAS: ["SE3"], + CONF_CURRENCY: "EUR", + }, + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reconfigure_successful" @@ -162,7 +147,8 @@ async def test_reconfigure( async def test_reconfigure_cannot_connect( hass: HomeAssistant, load_int: MockConfigEntry, - get_data: DeliveryPeriodData, + aioclient_mock: AiohttpClientMocker, + load_json: list[dict[str, Any]], error_message: Exception, p_error: str, ) -> None: @@ -184,17 +170,13 @@ async def test_reconfigure_cannot_connect( assert result["errors"] == {"base": p_error} - with patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_AREAS: ["SE3"], - CONF_CURRENCY: "EUR", - }, - ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_AREAS: ["SE3"], + CONF_CURRENCY: "EUR", + }, + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reconfigure_successful" diff --git 
a/tests/components/nordpool/test_coordinator.py b/tests/components/nordpool/test_coordinator.py index 68534237dee..7647fe4bdfe 100644 --- a/tests/components/nordpool/test_coordinator.py +++ b/tests/components/nordpool/test_coordinator.py @@ -7,8 +7,8 @@ from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory from pynordpool import ( - DeliveryPeriodData, NordPoolAuthenticationError, + NordPoolClient, NordPoolEmptyResponseError, NordPoolError, NordPoolResponseError, @@ -28,7 +28,7 @@ from tests.common import MockConfigEntry, async_fire_time_changed @pytest.mark.freeze_time("2024-11-05T10:00:00+00:00") async def test_coordinator( hass: HomeAssistant, - get_data: DeliveryPeriodData, + get_client: NordPoolClient, freezer: FrozenDateTimeFactory, caplog: pytest.LogCaptureFixture, ) -> None: @@ -41,30 +41,31 @@ async def test_coordinator( config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == "0.92737" + with ( patch( "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + side_effect=NordPoolError("error"), ) as mock_data, ): - mock_data.return_value = get_data - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - mock_data.assert_called_once() - state = hass.states.get("sensor.nord_pool_se3_current_price") - assert state.state == "0.92737" - mock_data.reset_mock() - - mock_data.side_effect = NordPoolError("error") freezer.tick(timedelta(hours=1)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) assert mock_data.call_count == 4 state = hass.states.get("sensor.nord_pool_se3_current_price") assert state.state == STATE_UNAVAILABLE - mock_data.reset_mock() + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + 
side_effect=NordPoolAuthenticationError("Authentication error"), + ) as mock_data, + ): assert "Authentication error" not in caplog.text - mock_data.side_effect = NordPoolAuthenticationError("Authentication error") freezer.tick(timedelta(hours=1)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) @@ -72,10 +73,14 @@ async def test_coordinator( state = hass.states.get("sensor.nord_pool_se3_current_price") assert state.state == STATE_UNAVAILABLE assert "Authentication error" in caplog.text - mock_data.reset_mock() + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + side_effect=NordPoolEmptyResponseError("Empty response"), + ) as mock_data, + ): assert "Empty response" not in caplog.text - mock_data.side_effect = NordPoolEmptyResponseError("Empty response") freezer.tick(timedelta(hours=1)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) @@ -83,10 +88,14 @@ async def test_coordinator( state = hass.states.get("sensor.nord_pool_se3_current_price") assert state.state == STATE_UNAVAILABLE assert "Empty response" in caplog.text - mock_data.reset_mock() + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + side_effect=NordPoolResponseError("Response error"), + ) as mock_data, + ): assert "Response error" not in caplog.text - mock_data.side_effect = NordPoolResponseError("Response error") freezer.tick(timedelta(hours=1)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) @@ -94,13 +103,9 @@ async def test_coordinator( state = hass.states.get("sensor.nord_pool_se3_current_price") assert state.state == STATE_UNAVAILABLE assert "Response error" in caplog.text - mock_data.reset_mock() - mock_data.return_value = get_data - mock_data.side_effect = None - freezer.tick(timedelta(hours=1)) - async_fire_time_changed(hass) - await 
hass.async_block_till_done() - mock_data.assert_called_once() - state = hass.states.get("sensor.nord_pool_se3_current_price") - assert state.state == "1.81645" + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == "1.81645" diff --git a/tests/components/nordpool/test_diagnostics.py b/tests/components/nordpool/test_diagnostics.py index 4639186ecf1..a9dfdd5eca5 100644 --- a/tests/components/nordpool/test_diagnostics.py +++ b/tests/components/nordpool/test_diagnostics.py @@ -2,19 +2,21 @@ from __future__ import annotations +import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator +@pytest.mark.freeze_time("2024-11-05T10:00:00+00:00") async def test_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - load_int: ConfigEntry, + load_int: MockConfigEntry, snapshot: SnapshotAssertion, ) -> None: """Test generating diagnostics for a config entry.""" diff --git a/tests/components/nordpool/test_init.py b/tests/components/nordpool/test_init.py index ebebb8b60c1..3b1fc1fd8ec 100644 --- a/tests/components/nordpool/test_init.py +++ b/tests/components/nordpool/test_init.py @@ -5,7 +5,7 @@ from __future__ import annotations from unittest.mock import patch from pynordpool import ( - DeliveryPeriodData, + NordPoolClient, NordPoolConnectionError, NordPoolEmptyResponseError, NordPoolError, @@ -22,7 +22,8 @@ from . 
import ENTRY_CONFIG from tests.common import MockConfigEntry -async def test_unload_entry(hass: HomeAssistant, get_data: DeliveryPeriodData) -> None: +@pytest.mark.freeze_time("2024-11-05T10:00:00+00:00") +async def test_unload_entry(hass: HomeAssistant, get_client: NordPoolClient) -> None: """Test load and unload an entry.""" entry = MockConfigEntry( domain=DOMAIN, @@ -31,13 +32,7 @@ async def test_unload_entry(hass: HomeAssistant, get_data: DeliveryPeriodData) - ) entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - ): - await hass.config_entries.async_setup(entry.entry_id) + await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done(wait_background_tasks=True) assert entry.state is ConfigEntryState.LOADED @@ -56,7 +51,7 @@ async def test_unload_entry(hass: HomeAssistant, get_data: DeliveryPeriodData) - ], ) async def test_initial_startup_fails( - hass: HomeAssistant, get_data: DeliveryPeriodData, error: Exception + hass: HomeAssistant, get_client: NordPoolClient, error: Exception ) -> None: """Test load and unload an entry.""" entry = MockConfigEntry( diff --git a/tests/components/nordpool/test_sensor.py b/tests/components/nordpool/test_sensor.py index 5c2d138cb34..a1a27b5feec 100644 --- a/tests/components/nordpool/test_sensor.py +++ b/tests/components/nordpool/test_sensor.py @@ -6,7 +6,6 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -38,12 +37,12 @@ async def test_sensor_no_next_price(hass: HomeAssistant, load_int: ConfigEntry) assert current_price is not None assert last_price is not None assert next_price is not None - assert current_price.state == "0.28914" - assert last_price.state == "0.28914" - 
assert next_price.state == STATE_UNKNOWN + assert current_price.state == "0.12666" # SE3 2024-11-05T23:00:00Z + assert last_price.state == "0.28914" # SE3 2024-11-05T22:00:00Z + assert next_price.state == "0.07406" # SE3 2024-11-06T00:00:00Z" -@pytest.mark.freeze_time("2024-11-05T00:00:00+01:00") +@pytest.mark.freeze_time("2024-11-06T00:00:00+01:00") @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensor_no_previous_price( hass: HomeAssistant, load_int: ConfigEntry @@ -57,6 +56,6 @@ async def test_sensor_no_previous_price( assert current_price is not None assert last_price is not None assert next_price is not None - assert current_price.state == "0.25073" - assert last_price.state == STATE_UNKNOWN - assert next_price.state == "0.07636" + assert current_price.state == "0.12666" # SE3 2024-11-05T23:00:00Z + assert last_price.state == "0.28914" # SE3 2024-11-05T22:00:00Z + assert next_price.state == "0.07406" # SE3 2024-11-06T00:00:00Z diff --git a/tests/components/nordpool/test_services.py b/tests/components/nordpool/test_services.py index 224b4bc9981..6d6af685d28 100644 --- a/tests/components/nordpool/test_services.py +++ b/tests/components/nordpool/test_services.py @@ -3,7 +3,6 @@ from unittest.mock import patch from pynordpool import ( - DeliveryPeriodData, NordPoolAuthenticationError, NordPoolEmptyResponseError, NordPoolError, @@ -28,7 +27,7 @@ TEST_SERVICE_DATA = { ATTR_CONFIG_ENTRY: "to_replace", ATTR_DATE: "2024-11-05", ATTR_AREAS: "SE3", - ATTR_CURRENCY: "SEK", + ATTR_CURRENCY: "EUR", } TEST_SERVICE_DATA_USE_DEFAULTS = { ATTR_CONFIG_ENTRY: "to_replace", @@ -40,45 +39,32 @@ TEST_SERVICE_DATA_USE_DEFAULTS = { async def test_service_call( hass: HomeAssistant, load_int: MockConfigEntry, - get_data: DeliveryPeriodData, snapshot: SnapshotAssertion, ) -> None: """Test get_prices_for_date service call.""" - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, 
- ), - ): - service_data = TEST_SERVICE_DATA.copy() - service_data[ATTR_CONFIG_ENTRY] = load_int.entry_id - response = await hass.services.async_call( - DOMAIN, - SERVICE_GET_PRICES_FOR_DATE, - service_data, - blocking=True, - return_response=True, - ) + service_data = TEST_SERVICE_DATA.copy() + service_data[ATTR_CONFIG_ENTRY] = load_int.entry_id + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_PRICES_FOR_DATE, + service_data, + blocking=True, + return_response=True, + ) assert response == snapshot price_value = response["SE3"][0]["price"] - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - ): - service_data = TEST_SERVICE_DATA_USE_DEFAULTS.copy() - service_data[ATTR_CONFIG_ENTRY] = load_int.entry_id - response = await hass.services.async_call( - DOMAIN, - SERVICE_GET_PRICES_FOR_DATE, - service_data, - blocking=True, - return_response=True, - ) + service_data = TEST_SERVICE_DATA_USE_DEFAULTS.copy() + service_data[ATTR_CONFIG_ENTRY] = load_int.entry_id + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_PRICES_FOR_DATE, + service_data, + blocking=True, + return_response=True, + ) assert "SE3" in response assert response["SE3"][0]["price"] == price_value @@ -124,17 +110,10 @@ async def test_service_call_failures( async def test_service_call_config_entry_bad_state( hass: HomeAssistant, load_int: MockConfigEntry, - get_data: DeliveryPeriodData, ) -> None: """Test get_prices_for_date service call when config entry bad state.""" - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - pytest.raises(ServiceValidationError) as err, - ): + with pytest.raises(ServiceValidationError) as err: await hass.services.async_call( DOMAIN, SERVICE_GET_PRICES_FOR_DATE, @@ -149,13 +128,7 @@ async def test_service_call_config_entry_bad_state( await 
hass.config_entries.async_unload(load_int.entry_id) await hass.async_block_till_done() - with ( - patch( - "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", - return_value=get_data, - ), - pytest.raises(ServiceValidationError) as err, - ): + with pytest.raises(ServiceValidationError) as err: await hass.services.async_call( DOMAIN, SERVICE_GET_PRICES_FOR_DATE, From 0f18f128fda6384dacc71588db267cb6c934cc21 Mon Sep 17 00:00:00 2001 From: Raphael Hehl <7577984+RaHehl@users.noreply.github.com> Date: Sun, 22 Dec 2024 21:50:30 +0100 Subject: [PATCH 641/677] Unifiprotect Add user information retrieval for NFC and fingerprint events (#132604) Co-authored-by: J. Nick Koston --- .../components/unifiprotect/event.py | 62 ++- tests/components/unifiprotect/test_event.py | 368 +++++++++++++++++- 2 files changed, 417 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/unifiprotect/event.py b/homeassistant/components/unifiprotect/event.py index f126920fb18..c8bce183e34 100644 --- a/homeassistant/components/unifiprotect/event.py +++ b/homeassistant/components/unifiprotect/event.py @@ -4,8 +4,6 @@ from __future__ import annotations import dataclasses -from uiprotect.data import Camera, EventType, ProtectAdoptableDeviceModel - from homeassistant.components.event import ( EventDeviceClass, EventEntity, @@ -14,17 +12,43 @@ from homeassistant.components.event import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import Bootstrap from .const import ( ATTR_EVENT_ID, EVENT_TYPE_DOORBELL_RING, EVENT_TYPE_FINGERPRINT_IDENTIFIED, EVENT_TYPE_FINGERPRINT_NOT_IDENTIFIED, EVENT_TYPE_NFC_SCANNED, + KEYRINGS_KEY_TYPE_ID_NFC, + KEYRINGS_ULP_ID, + KEYRINGS_USER_FULL_NAME, + KEYRINGS_USER_STATUS, +) +from .data import ( + Camera, + EventType, + ProtectAdoptableDeviceModel, + ProtectData, + ProtectDeviceType, + UFPConfigEntry, ) -from .data import ProtectData, ProtectDeviceType, UFPConfigEntry from .entity import EventEntityMixin, ProtectDeviceEntity, ProtectEventMixin +def _add_ulp_user_infos( + bootstrap: Bootstrap, event_data: dict[str, str], ulp_id: str +) -> None: + """Add ULP user information to the event data.""" + if ulp_usr := bootstrap.ulp_users.by_ulp_id(ulp_id): + event_data.update( + { + KEYRINGS_ULP_ID: ulp_usr.ulp_id, + KEYRINGS_USER_FULL_NAME: ulp_usr.full_name, + KEYRINGS_USER_STATUS: ulp_usr.status, + } + ) + + @dataclasses.dataclass(frozen=True, kw_only=True) class ProtectEventEntityDescription(ProtectEventMixin, EventEntityDescription): """Describes UniFi Protect event entity.""" @@ -78,9 +102,22 @@ class ProtectDeviceNFCEventEntity(EventEntityMixin, ProtectDeviceEntity, EventEn and not self._event_already_ended(prev_event, prev_event_end) and event.type is EventType.NFC_CARD_SCANNED ): - event_data = {ATTR_EVENT_ID: event.id} + event_data = { + ATTR_EVENT_ID: event.id, + KEYRINGS_USER_FULL_NAME: "", + KEYRINGS_ULP_ID: "", + KEYRINGS_USER_STATUS: "", + KEYRINGS_KEY_TYPE_ID_NFC: "", + } + if event.metadata and event.metadata.nfc and event.metadata.nfc.nfc_id: - event_data["nfc_id"] = event.metadata.nfc.nfc_id + nfc_id = event.metadata.nfc.nfc_id + event_data[KEYRINGS_KEY_TYPE_ID_NFC] = nfc_id + keyring = self.data.api.bootstrap.keyrings.by_registry_id(nfc_id) + if keyring and keyring.ulp_user: + _add_ulp_user_infos( + self.data.api.bootstrap, event_data, keyring.ulp_user + ) self._trigger_event(EVENT_TYPE_NFC_SCANNED, event_data) self.async_write_ha_state() @@ -109,17 
+146,22 @@ class ProtectDeviceFingerprintEventEntity( and not self._event_already_ended(prev_event, prev_event_end) and event.type is EventType.FINGERPRINT_IDENTIFIED ): - event_data = {ATTR_EVENT_ID: event.id} + event_data = { + ATTR_EVENT_ID: event.id, + KEYRINGS_USER_FULL_NAME: "", + KEYRINGS_ULP_ID: "", + } + event_identified = EVENT_TYPE_FINGERPRINT_NOT_IDENTIFIED if ( event.metadata and event.metadata.fingerprint and event.metadata.fingerprint.ulp_id ): - event_data["ulp_id"] = event.metadata.fingerprint.ulp_id event_identified = EVENT_TYPE_FINGERPRINT_IDENTIFIED - else: - event_data["ulp_id"] = "" - event_identified = EVENT_TYPE_FINGERPRINT_NOT_IDENTIFIED + ulp_id = event.metadata.fingerprint.ulp_id + if ulp_id: + event_data[KEYRINGS_ULP_ID] = ulp_id + _add_ulp_user_infos(self.data.api.bootstrap, event_data, ulp_id) self._trigger_event(event_identified, event_data) self.async_write_ha_state() diff --git a/tests/components/unifiprotect/test_event.py b/tests/components/unifiprotect/test_event.py index 6a26738f5e8..f674e14b519 100644 --- a/tests/components/unifiprotect/test_event.py +++ b/tests/components/unifiprotect/test_event.py @@ -175,6 +175,10 @@ async def test_doorbell_nfc_scanned( Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[1] ) + ulp_id = "ulp_id" + test_user_full_name = "Test User" + test_nfc_id = "test_nfc_id" + unsub = async_track_state_change_event(hass, entity_id, _capture_event) event = Event( model=ModelType.EVENT, @@ -187,7 +191,224 @@ async def test_doorbell_nfc_scanned( smart_detect_event_ids=[], camera_id=doorbell.id, api=ufp.api, - metadata={"nfc": {"nfc_id": "test_nfc_id", "user_id": "test_user_id"}}, + metadata={"nfc": {"nfc_id": test_nfc_id, "user_id": "test_user_id"}}, + ) + + new_camera = doorbell.copy() + new_camera.last_nfc_card_scanned_event_id = "test_event_id" + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_keyring = Mock() + mock_keyring.registry_id = test_nfc_id 
+ mock_keyring.registry_type = "nfc" + mock_keyring.ulp_user = ulp_id + ufp.api.bootstrap.keyrings.add(mock_keyring) + + mock_ulp_user = Mock() + mock_ulp_user.ulp_id = ulp_id + mock_ulp_user.full_name = test_user_full_name + mock_ulp_user.status = "ACTIVE" + ufp.api.bootstrap.ulp_users.add(mock_ulp_user) + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + assert len(events) == 1 + state = events[0].data["new_state"] + assert state + assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_ID] == "test_event_id" + assert state.attributes["nfc_id"] == "test_nfc_id" + assert state.attributes["full_name"] == test_user_full_name + + unsub() + + +async def test_doorbell_nfc_scanned_ulpusr_deactivated( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + fixed_now: datetime, +) -> None: + """Test a doorbell NFC scanned event.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 3, 3) + events: list[HAEvent] = [] + + @callback + def _capture_event(event: HAEvent) -> None: + events.append(event) + + _, entity_id = ids_from_device_description( + Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[1] + ) + + ulp_id = "ulp_id" + test_user_full_name = "Test User" + test_nfc_id = "test_nfc_id" + + unsub = async_track_state_change_event(hass, entity_id, _capture_event) + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.NFC_CARD_SCANNED, + start=fixed_now - timedelta(seconds=1), + end=None, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + metadata={"nfc": {"nfc_id": test_nfc_id, "user_id": "test_user_id"}}, + ) + + new_camera = doorbell.copy() + new_camera.last_nfc_card_scanned_event_id = "test_event_id" + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + 
ufp.api.bootstrap.events = {event.id: event} + + mock_keyring = Mock() + mock_keyring.registry_id = test_nfc_id + mock_keyring.registry_type = "nfc" + mock_keyring.ulp_user = ulp_id + ufp.api.bootstrap.keyrings.add(mock_keyring) + + mock_ulp_user = Mock() + mock_ulp_user.ulp_id = ulp_id + mock_ulp_user.full_name = test_user_full_name + mock_ulp_user.status = "DEACTIVATED" + ufp.api.bootstrap.ulp_users.add(mock_ulp_user) + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + assert len(events) == 1 + state = events[0].data["new_state"] + assert state + assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_ID] == "test_event_id" + assert state.attributes["nfc_id"] == "test_nfc_id" + assert state.attributes["full_name"] == "Test User" + assert state.attributes["user_status"] == "DEACTIVATED" + + unsub() + + +async def test_doorbell_nfc_scanned_no_ulpusr( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + fixed_now: datetime, +) -> None: + """Test a doorbell NFC scanned event.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 3, 3) + events: list[HAEvent] = [] + + @callback + def _capture_event(event: HAEvent) -> None: + events.append(event) + + _, entity_id = ids_from_device_description( + Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[1] + ) + + ulp_id = "ulp_id" + test_nfc_id = "test_nfc_id" + + unsub = async_track_state_change_event(hass, entity_id, _capture_event) + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.NFC_CARD_SCANNED, + start=fixed_now - timedelta(seconds=1), + end=None, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + metadata={"nfc": {"nfc_id": test_nfc_id, "user_id": "test_user_id"}}, + ) + + new_camera = doorbell.copy() + 
new_camera.last_nfc_card_scanned_event_id = "test_event_id" + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_keyring = Mock() + mock_keyring.registry_id = test_nfc_id + mock_keyring.registry_type = "nfc" + mock_keyring.ulp_user = ulp_id + ufp.api.bootstrap.keyrings.add(mock_keyring) + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + assert len(events) == 1 + state = events[0].data["new_state"] + assert state + assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_ID] == "test_event_id" + assert state.attributes["nfc_id"] == "test_nfc_id" + assert state.attributes["full_name"] == "" + + unsub() + + +async def test_doorbell_nfc_scanned_no_keyring( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + fixed_now: datetime, +) -> None: + """Test a doorbell NFC scanned event.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 3, 3) + events: list[HAEvent] = [] + + @callback + def _capture_event(event: HAEvent) -> None: + events.append(event) + + _, entity_id = ids_from_device_description( + Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[1] + ) + + test_nfc_id = "test_nfc_id" + + unsub = async_track_state_change_event(hass, entity_id, _capture_event) + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.NFC_CARD_SCANNED, + start=fixed_now - timedelta(seconds=1), + end=None, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + metadata={"nfc": {"nfc_id": test_nfc_id, "user_id": "test_user_id"}}, ) new_camera = doorbell.model_copy() @@ -208,6 +429,7 @@ async def test_doorbell_nfc_scanned( assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION assert state.attributes[ATTR_EVENT_ID] == 
"test_event_id" assert state.attributes["nfc_id"] == "test_nfc_id" + assert state.attributes["full_name"] == "" unsub() @@ -233,6 +455,9 @@ async def test_doorbell_fingerprint_identified( Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[2] ) + ulp_id = "ulp_id" + test_user_full_name = "Test User" + unsub = async_track_state_change_event(hass, entity_id, _capture_event) event = Event( model=ModelType.EVENT, @@ -245,7 +470,143 @@ async def test_doorbell_fingerprint_identified( smart_detect_event_ids=[], camera_id=doorbell.id, api=ufp.api, - metadata={"fingerprint": {"ulp_id": "test_ulp_id"}}, + metadata={"fingerprint": {"ulp_id": ulp_id}}, + ) + + new_camera = doorbell.copy() + new_camera.last_fingerprint_identified_event_id = "test_event_id" + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_ulp_user = Mock() + mock_ulp_user.ulp_id = ulp_id + mock_ulp_user.full_name = test_user_full_name + mock_ulp_user.status = "ACTIVE" + ufp.api.bootstrap.ulp_users.add(mock_ulp_user) + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + assert len(events) == 1 + state = events[0].data["new_state"] + assert state + assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_ID] == "test_event_id" + assert state.attributes["ulp_id"] == ulp_id + assert state.attributes["full_name"] == test_user_full_name + + unsub() + + +async def test_doorbell_fingerprint_identified_user_deactivated( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + fixed_now: datetime, +) -> None: + """Test a doorbell fingerprint identified event.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 3, 3) + events: list[HAEvent] = [] + + @callback + def _capture_event(event: HAEvent) -> None: + events.append(event) + + _, 
entity_id = ids_from_device_description( + Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[2] + ) + + ulp_id = "ulp_id" + test_user_full_name = "Test User" + + unsub = async_track_state_change_event(hass, entity_id, _capture_event) + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.FINGERPRINT_IDENTIFIED, + start=fixed_now - timedelta(seconds=1), + end=None, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + metadata={"fingerprint": {"ulp_id": ulp_id}}, + ) + + new_camera = doorbell.copy() + new_camera.last_fingerprint_identified_event_id = "test_event_id" + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_ulp_user = Mock() + mock_ulp_user.ulp_id = ulp_id + mock_ulp_user.full_name = test_user_full_name + mock_ulp_user.status = "DEACTIVATED" + ufp.api.bootstrap.ulp_users.add(mock_ulp_user) + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + assert len(events) == 1 + state = events[0].data["new_state"] + assert state + assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_ID] == "test_event_id" + assert state.attributes["ulp_id"] == ulp_id + assert state.attributes["full_name"] == "Test User" + assert state.attributes["user_status"] == "DEACTIVATED" + + unsub() + + +async def test_doorbell_fingerprint_identified_no_user( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + fixed_now: datetime, +) -> None: + """Test a doorbell fingerprint identified event.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 3, 3) + events: list[HAEvent] = [] + + @callback + def _capture_event(event: HAEvent) -> None: + events.append(event) + + _, entity_id = ids_from_device_description( + 
Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[2] + ) + + ulp_id = "ulp_id" + + unsub = async_track_state_change_event(hass, entity_id, _capture_event) + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.FINGERPRINT_IDENTIFIED, + start=fixed_now - timedelta(seconds=1), + end=None, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + metadata={"fingerprint": {"ulp_id": ulp_id}}, ) new_camera = doorbell.model_copy() @@ -265,7 +626,8 @@ async def test_doorbell_fingerprint_identified( assert state assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION assert state.attributes[ATTR_EVENT_ID] == "test_event_id" - assert state.attributes["ulp_id"] == "test_ulp_id" + assert state.attributes["ulp_id"] == ulp_id + assert state.attributes["full_name"] == "" unsub() From ebcb478f5251ae3beb7960905b96d3bc2c4284f7 Mon Sep 17 00:00:00 2001 From: "Steven B." <51370195+sdb9696@users.noreply.github.com> Date: Sun, 22 Dec 2024 20:53:14 +0000 Subject: [PATCH 642/677] Add pan/tilt features to tplink integration (#133829) --- homeassistant/components/tplink/button.py | 12 ++ homeassistant/components/tplink/entity.py | 7 - homeassistant/components/tplink/icons.json | 18 ++ homeassistant/components/tplink/number.py | 8 + homeassistant/components/tplink/strings.json | 18 ++ .../tplink/snapshots/test_button.ambr | 184 ++++++++++++++++++ .../tplink/snapshots/test_number.ambr | 110 +++++++++++ 7 files changed, 350 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/tplink/button.py b/homeassistant/components/tplink/button.py index 131325e489d..6e0d34864d9 100644 --- a/homeassistant/components/tplink/button.py +++ b/homeassistant/components/tplink/button.py @@ -50,6 +50,18 @@ BUTTON_DESCRIPTIONS: Final = [ key="reboot", device_class=ButtonDeviceClass.RESTART, ), + TPLinkButtonEntityDescription( + key="pan_left", + ), + TPLinkButtonEntityDescription( + key="pan_right", + ), + 
TPLinkButtonEntityDescription( + key="tilt_up", + ), + TPLinkButtonEntityDescription( + key="tilt_down", + ), ] BUTTON_DESCRIPTIONS_MAP = {desc.key: desc for desc in BUTTON_DESCRIPTIONS} diff --git a/homeassistant/components/tplink/entity.py b/homeassistant/components/tplink/entity.py index 60d066012a2..d7b02b80177 100644 --- a/homeassistant/components/tplink/entity.py +++ b/homeassistant/components/tplink/entity.py @@ -73,13 +73,6 @@ EXCLUDED_FEATURES = { "check_latest_firmware", # siren "alarm", - # camera - "pan_left", - "pan_right", - "pan_step", - "tilt_up", - "tilt_down", - "tilt_step", } diff --git a/homeassistant/components/tplink/icons.json b/homeassistant/components/tplink/icons.json index 0abd68543c5..3f3a3b1233b 100644 --- a/homeassistant/components/tplink/icons.json +++ b/homeassistant/components/tplink/icons.json @@ -20,6 +20,18 @@ }, "stop_alarm": { "default": "mdi:bell-cancel" + }, + "pan_left": { + "default": "mdi:chevron-left" + }, + "pan_right": { + "default": "mdi:chevron-right" + }, + "tilt_up": { + "default": "mdi:chevron-up" + }, + "tilt_down": { + "default": "mdi:chevron-down" } }, "select": { @@ -117,6 +129,12 @@ }, "target_temperature": { "default": "mdi:thermometer" + }, + "pan_step": { + "default": "mdi:unfold-more-vertical" + }, + "tilt_step": { + "default": "mdi:unfold-more-horizontal" } } }, diff --git a/homeassistant/components/tplink/number.py b/homeassistant/components/tplink/number.py index b51c00db7c0..489805029ea 100644 --- a/homeassistant/components/tplink/number.py +++ b/homeassistant/components/tplink/number.py @@ -51,6 +51,14 @@ NUMBER_DESCRIPTIONS: Final = ( key="temperature_offset", mode=NumberMode.BOX, ), + TPLinkNumberEntityDescription( + key="pan_step", + mode=NumberMode.BOX, + ), + TPLinkNumberEntityDescription( + key="tilt_step", + mode=NumberMode.BOX, + ), ) NUMBER_DESCRIPTIONS_MAP = {desc.key: desc for desc in NUMBER_DESCRIPTIONS} diff --git a/homeassistant/components/tplink/strings.json 
b/homeassistant/components/tplink/strings.json index 7443636c3c0..5aa7c37d612 100644 --- a/homeassistant/components/tplink/strings.json +++ b/homeassistant/components/tplink/strings.json @@ -120,6 +120,18 @@ }, "stop_alarm": { "name": "Stop alarm" + }, + "pan_left": { + "name": "Pan left" + }, + "pan_right": { + "name": "Pan right" + }, + "tilt_up": { + "name": "Tilt up" + }, + "tilt_down": { + "name": "Tilt down" } }, "camera": { @@ -235,6 +247,12 @@ }, "temperature_offset": { "name": "Temperature offset" + }, + "pan_step": { + "name": "Pan degrees" + }, + "tilt_step": { + "name": "Tilt degrees" } } }, diff --git a/tests/components/tplink/snapshots/test_button.ambr b/tests/components/tplink/snapshots/test_button.ambr index bb75f4642e1..de626cd5818 100644 --- a/tests/components/tplink/snapshots/test_button.ambr +++ b/tests/components/tplink/snapshots/test_button.ambr @@ -1,4 +1,96 @@ # serializer version: 1 +# name: test_states[button.my_device_pan_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.my_device_pan_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pan left', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pan_left', + 'unique_id': '123456789ABCDEFGH_pan_left', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[button.my_device_pan_left-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Pan left', + }), + 'context': , + 'entity_id': 'button.my_device_pan_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: 
test_states[button.my_device_pan_right-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.my_device_pan_right', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pan right', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pan_right', + 'unique_id': '123456789ABCDEFGH_pan_right', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[button.my_device_pan_right-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Pan right', + }), + 'context': , + 'entity_id': 'button.my_device_pan_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_states[button.my_device_restart-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -124,6 +216,98 @@ 'state': 'unknown', }) # --- +# name: test_states[button.my_device_tilt_down-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.my_device_tilt_down', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tilt down', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tilt_down', + 'unique_id': '123456789ABCDEFGH_tilt_down', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_states[button.my_device_tilt_down-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Tilt down', + }), + 'context': , + 'entity_id': 'button.my_device_tilt_down', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_states[button.my_device_tilt_up-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.my_device_tilt_up', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tilt up', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tilt_up', + 'unique_id': '123456789ABCDEFGH_tilt_up', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[button.my_device_tilt_up-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Tilt up', + }), + 'context': , + 'entity_id': 'button.my_device_tilt_up', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_states[my_device-entry] DeviceRegistryEntrySnapshot({ 'area_id': None, diff --git a/tests/components/tplink/snapshots/test_number.ambr b/tests/components/tplink/snapshots/test_number.ambr index dbb58bac01b..df5ef71bf44 100644 --- a/tests/components/tplink/snapshots/test_number.ambr +++ b/tests/components/tplink/snapshots/test_number.ambr @@ -35,6 +35,61 @@ 'via_device_id': None, }) # --- +# name: test_states[number.my_device_pan_degrees-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65536, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': 
, + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.my_device_pan_degrees', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pan degrees', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pan_step', + 'unique_id': '123456789ABCDEFGH_pan_step', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[number.my_device_pan_degrees-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Pan degrees', + 'max': 65536, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.my_device_pan_degrees', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- # name: test_states[number.my_device_smooth_off-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -200,6 +255,61 @@ 'state': 'False', }) # --- +# name: test_states[number.my_device_tilt_degrees-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65536, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.my_device_tilt_degrees', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tilt degrees', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tilt_step', + 'unique_id': '123456789ABCDEFGH_tilt_step', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[number.my_device_tilt_degrees-state] 
+ StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Tilt degrees', + 'max': 65536, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.my_device_tilt_degrees', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- # name: test_states[number.my_device_turn_off_in-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ From 2d2b979c7d7a5cc04f27ad19f72c7ac3127dd7a0 Mon Sep 17 00:00:00 2001 From: Josef Zweck Date: Sun, 22 Dec 2024 21:55:04 +0100 Subject: [PATCH 643/677] Bump pylamarzocco to 1.4.2 (#133826) --- homeassistant/components/lamarzocco/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index b34df6d6917..309b858c77c 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -37,5 +37,5 @@ "iot_class": "cloud_polling", "loggers": ["pylamarzocco"], "quality_scale": "platinum", - "requirements": ["pylamarzocco==1.4.1"] + "requirements": ["pylamarzocco==1.4.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 321f104fa1f..3133044fbe0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2040,7 +2040,7 @@ pykwb==0.0.8 pylacrosse==0.4 # homeassistant.components.lamarzocco -pylamarzocco==1.4.1 +pylamarzocco==1.4.2 # homeassistant.components.lastfm pylast==5.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 58004e5bd8b..3e024e0d40f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1654,7 +1654,7 @@ pykrakenapi==0.1.8 pykulersky==0.5.2 # homeassistant.components.lamarzocco -pylamarzocco==1.4.1 +pylamarzocco==1.4.2 # homeassistant.components.lastfm pylast==5.1.0 From c2358d51586741628a32db02d75466ffbb23e9d8 Mon Sep 17 00:00:00 2001 From: Lucas 
Gasenzer Date: Sun, 22 Dec 2024 22:37:57 +0100 Subject: [PATCH 644/677] Add Switchbot Water Leak Detector (BLE) (#133799) Co-authored-by: J. Nick Koston --- .../components/switchbot/__init__.py | 1 + .../components/switchbot/binary_sensor.py | 5 +++ homeassistant/components/switchbot/const.py | 2 + tests/components/switchbot/__init__.py | 24 +++++++++++ tests/components/switchbot/test_sensor.py | 43 +++++++++++++++++++ 5 files changed, 75 insertions(+) diff --git a/homeassistant/components/switchbot/__init__.py b/homeassistant/components/switchbot/__init__.py index 522258c2a55..499a5073872 100644 --- a/homeassistant/components/switchbot/__init__.py +++ b/homeassistant/components/switchbot/__init__.py @@ -64,6 +64,7 @@ PLATFORMS_BY_TYPE = { SupportedModels.HUB2.value: [Platform.SENSOR], SupportedModels.RELAY_SWITCH_1PM.value: [Platform.SWITCH, Platform.SENSOR], SupportedModels.RELAY_SWITCH_1.value: [Platform.SWITCH], + SupportedModels.LEAK.value: [Platform.BINARY_SENSOR, Platform.SENSOR], } CLASS_BY_DEVICE = { SupportedModels.CEILING_LIGHT.value: switchbot.SwitchbotCeilingLight, diff --git a/homeassistant/components/switchbot/binary_sensor.py b/homeassistant/components/switchbot/binary_sensor.py index a545ffd01ce..144872ff315 100644 --- a/homeassistant/components/switchbot/binary_sensor.py +++ b/homeassistant/components/switchbot/binary_sensor.py @@ -64,6 +64,11 @@ BINARY_SENSOR_TYPES: dict[str, BinarySensorEntityDescription] = { translation_key="door_auto_lock_paused", entity_category=EntityCategory.DIAGNOSTIC, ), + "leak": BinarySensorEntityDescription( + key="leak", + name=None, + device_class=BinarySensorDeviceClass.MOISTURE, + ), } diff --git a/homeassistant/components/switchbot/const.py b/homeassistant/components/switchbot/const.py index 383fd6b03b6..854ab32b657 100644 --- a/homeassistant/components/switchbot/const.py +++ b/homeassistant/components/switchbot/const.py @@ -33,6 +33,7 @@ class SupportedModels(StrEnum): HUB2 = "hub2" RELAY_SWITCH_1PM = 
"relay_switch_1pm" RELAY_SWITCH_1 = "relay_switch_1" + LEAK = "leak" CONNECTABLE_SUPPORTED_MODEL_TYPES = { @@ -58,6 +59,7 @@ NON_CONNECTABLE_SUPPORTED_MODEL_TYPES = { SwitchbotModel.METER_PRO_C: SupportedModels.HYGROMETER_CO2, SwitchbotModel.CONTACT_SENSOR: SupportedModels.CONTACT, SwitchbotModel.MOTION_SENSOR: SupportedModels.MOTION, + SwitchbotModel.LEAK: SupportedModels.LEAK, } SUPPORTED_MODEL_TYPES = ( diff --git a/tests/components/switchbot/__init__.py b/tests/components/switchbot/__init__.py index c5ecebf21b3..9ecffd395a3 100644 --- a/tests/components/switchbot/__init__.py +++ b/tests/components/switchbot/__init__.py @@ -250,3 +250,27 @@ WORELAY_SWITCH_1PM_SERVICE_INFO = BluetoothServiceInfoBleak( connectable=True, tx_power=-127, ) + +LEAK_SERVICE_INFO = BluetoothServiceInfoBleak( + name="Any", + manufacturer_data={ + 2409: b"\xd6407D1\x02V\x90\x00\x00\x00\x00\x1e\x05\x00\x00\x00\x00" + }, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"&\\x00V"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + address="AA:BB:CC:DD:EE:FF", + rssi=-60, + source="local", + advertisement=generate_advertisement_data( + local_name="Any", + manufacturer_data={ + 2409: b"\xd6407D1\x02V\x90\x00\x00\x00\x00\x1e\x05\x00\x00\x00\x00" + }, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"&\\x00V"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + ), + device=generate_ble_device("AA:BB:CC:DD:EE:FF", "Any"), + time=0, + connectable=False, + tx_power=-127, +) diff --git a/tests/components/switchbot/test_sensor.py b/tests/components/switchbot/test_sensor.py index 205bb739508..acf1bacc054 100644 --- a/tests/components/switchbot/test_sensor.py +++ b/tests/components/switchbot/test_sensor.py @@ -22,6 +22,7 @@ from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from . 
import ( + LEAK_SERVICE_INFO, WOHAND_SERVICE_INFO, WOMETERTHPC_SERVICE_INFO, WORELAY_SWITCH_1PM_SERVICE_INFO, @@ -151,3 +152,45 @@ async def test_relay_switch_1pm_power_sensor(hass: HomeAssistant) -> None: assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_leak_sensor(hass: HomeAssistant) -> None: + """Test setting up the leak detector.""" + await async_setup_component(hass, DOMAIN, {}) + inject_bluetooth_service_info(hass, LEAK_SERVICE_INFO) + + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + CONF_NAME: "test-name", + CONF_SENSOR_TYPE: "leak", + }, + unique_id="aabbccddeeaa", + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + battery_sensor = hass.states.get("sensor.test_name_battery") + battery_sensor_attrs = battery_sensor.attributes + assert battery_sensor.state == "86" + assert battery_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Battery" + assert battery_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "%" + assert battery_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + rssi_sensor = hass.states.get("sensor.test_name_bluetooth_signal") + rssi_sensor_attrs = rssi_sensor.attributes + assert rssi_sensor.state == "-60" + assert rssi_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Bluetooth signal" + assert rssi_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "dBm" + + leak_sensor = hass.states.get("binary_sensor.test_name") + leak_sensor_attrs = leak_sensor.attributes + assert leak_sensor.state == "off" + assert leak_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name" + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() From c9ad87d4643a0d3f000ed6f83c8f442bb49a35a9 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 22 Dec 2024 22:44:15 +0100 Subject: [PATCH 
645/677] Add light tests for Niko Home Control (#133750) --- .../components/niko_home_control/light.py | 1 + .../components/niko_home_control/conftest.py | 44 +++++- .../snapshots/test_light.ambr | 112 ++++++++++++++ .../niko_home_control/test_light.py | 138 ++++++++++++++++++ 4 files changed, 291 insertions(+), 4 deletions(-) create mode 100644 tests/components/niko_home_control/snapshots/test_light.ambr create mode 100644 tests/components/niko_home_control/test_light.py diff --git a/homeassistant/components/niko_home_control/light.py b/homeassistant/components/niko_home_control/light.py index c9902cbf11b..69d4e71c755 100644 --- a/homeassistant/components/niko_home_control/light.py +++ b/homeassistant/components/niko_home_control/light.py @@ -108,6 +108,7 @@ class NikoHomeControlLight(NikoHomeControlEntity, LightEntity): if action.is_dimmable: self._attr_color_mode = ColorMode.BRIGHTNESS self._attr_supported_color_modes = {ColorMode.BRIGHTNESS} + self._attr_brightness = round(action.state * 2.55) def turn_on(self, **kwargs: Any) -> None: """Instruct the light to turn on.""" diff --git a/tests/components/niko_home_control/conftest.py b/tests/components/niko_home_control/conftest.py index 63307a88e8a..b3dedd0c182 100644 --- a/tests/components/niko_home_control/conftest.py +++ b/tests/components/niko_home_control/conftest.py @@ -3,6 +3,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, patch +from nhc.light import NHCLight import pytest from homeassistant.components.niko_home_control.const import DOMAIN @@ -22,16 +23,48 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture -def mock_niko_home_control_connection() -> Generator[AsyncMock]: +def light() -> NHCLight: + """Return a light mock.""" + mock = AsyncMock(spec=NHCLight) + mock.id = 1 + mock.type = 1 + mock.is_dimmable = False + mock.name = "light" + mock.suggested_area = "room" + mock.state = 100 + return mock + + +@pytest.fixture +def dimmable_light() -> NHCLight: + 
"""Return a dimmable light mock.""" + mock = AsyncMock(spec=NHCLight) + mock.id = 2 + mock.type = 2 + mock.is_dimmable = True + mock.name = "dimmable light" + mock.suggested_area = "room" + mock.state = 100 + return mock + + +@pytest.fixture +def mock_niko_home_control_connection( + light: NHCLight, dimmable_light: NHCLight +) -> Generator[AsyncMock]: """Mock a NHC client.""" with ( patch( - "homeassistant.components.niko_home_control.config_flow.NHCController", + "homeassistant.components.niko_home_control.NHCController", autospec=True, ) as mock_client, + patch( + "homeassistant.components.niko_home_control.config_flow.NHCController", + new=mock_client, + ), ): client = mock_client.return_value - client.return_value = True + client.lights = [light, dimmable_light] yield client @@ -39,5 +72,8 @@ def mock_niko_home_control_connection() -> Generator[AsyncMock]: def mock_config_entry() -> MockConfigEntry: """Return the default mocked config entry.""" return MockConfigEntry( - domain=DOMAIN, title="Niko Home Control", data={CONF_HOST: "192.168.0.123"} + domain=DOMAIN, + title="Niko Home Control", + data={CONF_HOST: "192.168.0.123"}, + entry_id="01JFN93M7KRA38V5AMPCJ2JYYV", ) diff --git a/tests/components/niko_home_control/snapshots/test_light.ambr b/tests/components/niko_home_control/snapshots/test_light.ambr new file mode 100644 index 00000000000..702b7326ee2 --- /dev/null +++ b/tests/components/niko_home_control/snapshots/test_light.ambr @@ -0,0 +1,112 @@ +# serializer version: 1 +# name: test_entities[light.dimmable_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.dimmable_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'niko_home_control', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01JFN93M7KRA38V5AMPCJ2JYYV-2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[light.dimmable_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 255, + 'color_mode': , + 'friendly_name': 'dimmable light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.dimmable_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entities[light.light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'niko_home_control', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01JFN93M7KRA38V5AMPCJ2JYYV-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[light.light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': , + 'friendly_name': 'light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/niko_home_control/test_light.py b/tests/components/niko_home_control/test_light.py new file mode 100644 index 
00000000000..801bdf6a296 --- /dev/null +++ b/tests/components/niko_home_control/test_light.py @@ -0,0 +1,138 @@ +"""Tests for the Niko Home Control Light platform.""" + +from typing import Any +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.light import ATTR_BRIGHTNESS, DOMAIN as LIGHT_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_niko_home_control_connection: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch( + "homeassistant.components.niko_home_control.PLATFORMS", [Platform.LIGHT] + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize( + ("light_id", "data", "set_brightness"), + [ + (0, {ATTR_ENTITY_ID: "light.light"}, 100.0), + ( + 1, + {ATTR_ENTITY_ID: "light.dimmable_light", ATTR_BRIGHTNESS: 50}, + 19.607843137254903, + ), + ], +) +async def test_turning_on( + hass: HomeAssistant, + mock_niko_home_control_connection: AsyncMock, + mock_config_entry: MockConfigEntry, + light_id: int, + data: dict[str, Any], + set_brightness: int, +) -> None: + """Test turning on the light.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + data, + blocking=True, + ) + mock_niko_home_control_connection.lights[light_id].turn_on.assert_called_once_with( + set_brightness + ) + + +@pytest.mark.parametrize( + ("light_id", "entity_id"), + [ + 
(0, "light.light"), + (1, "light.dimmable_light"), + ], +) +async def test_turning_off( + hass: HomeAssistant, + mock_niko_home_control_connection: AsyncMock, + mock_config_entry: MockConfigEntry, + light_id: int, + entity_id: str, +) -> None: + """Test turning on the light.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_niko_home_control_connection.lights[ + light_id + ].turn_off.assert_called_once_with() + + +async def test_updating( + hass: HomeAssistant, + mock_niko_home_control_connection: AsyncMock, + mock_config_entry: MockConfigEntry, + light: AsyncMock, + dimmable_light: AsyncMock, +) -> None: + """Test turning on the light.""" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("light.light").state == STATE_ON + + light.state = 0 + await mock_niko_home_control_connection.register_callback.call_args_list[0][0][1](0) + await hass.async_block_till_done() + + assert hass.states.get("light.light").state == STATE_OFF + + assert hass.states.get("light.dimmable_light").state == STATE_ON + assert hass.states.get("light.dimmable_light").attributes[ATTR_BRIGHTNESS] == 255 + + dimmable_light.state = 80 + await mock_niko_home_control_connection.register_callback.call_args_list[1][0][1]( + 80 + ) + await hass.async_block_till_done() + + assert hass.states.get("light.dimmable_light").state == STATE_ON + assert hass.states.get("light.dimmable_light").attributes[ATTR_BRIGHTNESS] == 204 + + dimmable_light.state = 0 + await mock_niko_home_control_connection.register_callback.call_args_list[1][0][1](0) + await hass.async_block_till_done() + + assert hass.states.get("light.dimmable_light").state == STATE_OFF + assert hass.states.get("light.dimmable_light").attributes[ATTR_BRIGHTNESS] is None From 8eebbd45bdc441225cd0439e447e4b8c4603512d Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Sun, 22 Dec 2024 22:52:35 
+0100 Subject: [PATCH 646/677] Bump pyOverkiz to 1.15.5 (#133835) --- homeassistant/components/overkiz/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/overkiz/manifest.json b/homeassistant/components/overkiz/manifest.json index 84fdc11ae47..3b093eb06ac 100644 --- a/homeassistant/components/overkiz/manifest.json +++ b/homeassistant/components/overkiz/manifest.json @@ -20,7 +20,7 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"], - "requirements": ["pyoverkiz==1.15.4"], + "requirements": ["pyoverkiz==1.15.5"], "zeroconf": [ { "type": "_kizbox._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index 3133044fbe0..a02fe7f33ff 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2162,7 +2162,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.15.4 +pyoverkiz==1.15.5 # homeassistant.components.onewire pyownet==0.10.0.post1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3e024e0d40f..bbf04fbf2d6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1758,7 +1758,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.15.4 +pyoverkiz==1.15.5 # homeassistant.components.onewire pyownet==0.10.0.post1 From 0560b634e39a87ba2543e6b2b114488f44c04a50 Mon Sep 17 00:00:00 2001 From: Norbert Rittel Date: Sun, 22 Dec 2024 23:14:01 +0100 Subject: [PATCH 647/677] Make To-do action names and descriptions consistent with HA standard (#133734) --- homeassistant/components/todo/strings.json | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/todo/strings.json b/homeassistant/components/todo/strings.json index 245e5c82fc8..cffb22e89f0 100644 --- a/homeassistant/components/todo/strings.json +++ 
b/homeassistant/components/todo/strings.json @@ -7,8 +7,8 @@ }, "services": { "get_items": { - "name": "Get to-do list items", - "description": "Get items on a to-do list.", + "name": "Get items", + "description": "Gets items on a to-do list.", "fields": { "status": { "name": "Status", @@ -17,8 +17,8 @@ } }, "add_item": { - "name": "Add to-do list item", - "description": "Add a new to-do list item.", + "name": "Add item", + "description": "Adds a new to-do list item.", "fields": { "item": { "name": "Item name", @@ -39,8 +39,8 @@ } }, "update_item": { - "name": "Update to-do list item", - "description": "Update an existing to-do list item based on its name.", + "name": "Update item", + "description": "Updates an existing to-do list item based on its name.", "fields": { "item": { "name": "Item name", @@ -69,12 +69,12 @@ } }, "remove_completed_items": { - "name": "Remove all completed to-do list items", - "description": "Remove all to-do list items that have been completed." + "name": "Remove completed items", + "description": "Removes all to-do list items that have been completed." 
}, "remove_item": { - "name": "Remove a to-do list item", - "description": "Remove an existing to-do list item by its name.", + "name": "Remove item", + "description": "Removes an existing to-do list item by its name.", "fields": { "item": { "name": "Item name", From 74b425a06e54e7c86ff482f7e928dd2fbc7c5395 Mon Sep 17 00:00:00 2001 From: Andre Lengwenus Date: Sun, 22 Dec 2024 23:20:01 +0100 Subject: [PATCH 648/677] Reload on connection lost for LCN integration (#133638) --- homeassistant/components/lcn/__init__.py | 28 ++++++++++++++++++++++++ tests/components/lcn/test_init.py | 17 ++++++++++++++ 2 files changed, 45 insertions(+) diff --git a/homeassistant/components/lcn/__init__.py b/homeassistant/components/lcn/__init__.py index a10d08ad073..7fbe7e7ac0e 100644 --- a/homeassistant/components/lcn/__init__.py +++ b/homeassistant/components/lcn/__init__.py @@ -14,6 +14,7 @@ from pypck.connection import ( PchkLcnNotConnectedError, PchkLicenseError, ) +from pypck.lcn_defs import LcnEvent from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -124,10 +125,12 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b # register for LCN bus messages device_registry = dr.async_get(hass) + event_received = partial(async_host_event_received, hass, config_entry) input_received = partial( async_host_input_received, hass, config_entry, device_registry ) + lcn_connection.register_for_events(event_received) lcn_connection.register_for_inputs(input_received) return True @@ -183,6 +186,31 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> return unload_ok +def async_host_event_received( + hass: HomeAssistant, config_entry: ConfigEntry, event: pypck.lcn_defs.LcnEvent +) -> None: + """Process received event from LCN.""" + lcn_connection = hass.data[DOMAIN][config_entry.entry_id][CONNECTION] + + async def reload_config_entry() -> None: + """Close connection and schedule config entry for 
reload.""" + await lcn_connection.async_close() + hass.config_entries.async_schedule_reload(config_entry.entry_id) + + if event in ( + LcnEvent.CONNECTION_LOST, + LcnEvent.PING_TIMEOUT, + ): + _LOGGER.info('The connection to host "%s" has been lost', config_entry.title) + hass.async_create_task(reload_config_entry()) + elif event == LcnEvent.BUS_DISCONNECTED: + _LOGGER.info( + 'The connection to the LCN bus via host "%s" has been disconnected', + config_entry.title, + ) + hass.async_create_task(reload_config_entry()) + + def async_host_input_received( hass: HomeAssistant, config_entry: ConfigEntry, diff --git a/tests/components/lcn/test_init.py b/tests/components/lcn/test_init.py index bffa91d14ef..4bb8d023d3f 100644 --- a/tests/components/lcn/test_init.py +++ b/tests/components/lcn/test_init.py @@ -9,6 +9,7 @@ from pypck.connection import ( PchkLcnNotConnectedError, PchkLicenseError, ) +from pypck.lcn_defs import LcnEvent import pytest from homeassistant import config_entries @@ -116,6 +117,22 @@ async def test_async_setup_entry_fails( assert entry.state is ConfigEntryState.SETUP_RETRY +@pytest.mark.parametrize( + "event", + [LcnEvent.CONNECTION_LOST, LcnEvent.PING_TIMEOUT, LcnEvent.BUS_DISCONNECTED], +) +async def test_async_entry_reload_on_host_event_received( + hass: HomeAssistant, entry: MockConfigEntry, event: LcnEvent +) -> None: + """Test for config entry reload on certain host event received.""" + lcn_connection = await init_integration(hass, entry) + with patch( + "homeassistant.config_entries.ConfigEntries.async_schedule_reload" + ) as async_schedule_reload: + lcn_connection.fire_event(event) + async_schedule_reload.assert_called_with(entry.entry_id) + + @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_migrate_1_1(hass: HomeAssistant, entry) -> None: """Test migration config entry.""" From a3657a0fef0fca03ff497bf306eda1bbee917b30 Mon Sep 17 00:00:00 2001 From: jb101010-2 
<168106462+jb101010-2@users.noreply.github.com> Date: Sun, 22 Dec 2024 23:21:52 +0100 Subject: [PATCH 649/677] Suez_water: fix yesterday sensor extra_state invalid typing (#133425) --- .../components/suez_water/coordinator.py | 30 ++++++++++--------- homeassistant/components/suez_water/sensor.py | 10 +++---- tests/components/suez_water/conftest.py | 17 ++++++----- tests/components/suez_water/test_sensor.py | 8 +++++ 4 files changed, 38 insertions(+), 27 deletions(-) diff --git a/homeassistant/components/suez_water/coordinator.py b/homeassistant/components/suez_water/coordinator.py index 72da68c0f5d..aab1ba110b7 100644 --- a/homeassistant/components/suez_water/coordinator.py +++ b/homeassistant/components/suez_water/coordinator.py @@ -1,9 +1,7 @@ """Suez water update coordinator.""" -from collections.abc import Mapping from dataclasses import dataclass from datetime import date -from typing import Any from pysuez import PySuezError, SuezClient @@ -20,11 +18,11 @@ from .const import CONF_COUNTER_ID, DATA_REFRESH_INTERVAL, DOMAIN class SuezWaterAggregatedAttributes: """Class containing aggregated sensor extra attributes.""" - this_month_consumption: dict[date, float] - previous_month_consumption: dict[date, float] + this_month_consumption: dict[str, float] + previous_month_consumption: dict[str, float] last_year_overall: dict[str, float] this_year_overall: dict[str, float] - history: dict[date, float] + history: dict[str, float] highest_monthly_consumption: float @@ -33,7 +31,7 @@ class SuezWaterData: """Class used to hold all fetch data from suez api.""" aggregated_value: float - aggregated_attr: Mapping[str, Any] + aggregated_attr: SuezWaterAggregatedAttributes price: float @@ -68,18 +66,22 @@ class SuezWaterCoordinator(DataUpdateCoordinator[SuezWaterData]): async def _async_update_data(self) -> SuezWaterData: """Fetch data from API endpoint.""" + + def map_dict(param: dict[date, float]) -> dict[str, float]: + return {str(key): value for key, value in param.items()} 
+ try: aggregated = await self._suez_client.fetch_aggregated_data() data = SuezWaterData( aggregated_value=aggregated.value, - aggregated_attr={ - "this_month_consumption": aggregated.current_month, - "previous_month_consumption": aggregated.previous_month, - "highest_monthly_consumption": aggregated.highest_monthly_consumption, - "last_year_overall": aggregated.previous_year, - "this_year_overall": aggregated.current_year, - "history": aggregated.history, - }, + aggregated_attr=SuezWaterAggregatedAttributes( + this_month_consumption=map_dict(aggregated.current_month), + previous_month_consumption=map_dict(aggregated.previous_month), + highest_monthly_consumption=aggregated.highest_monthly_consumption, + last_year_overall=aggregated.previous_year, + this_year_overall=aggregated.current_year, + history=map_dict(aggregated.history), + ), price=(await self._suez_client.get_price()).price, ) except PySuezError as err: diff --git a/homeassistant/components/suez_water/sensor.py b/homeassistant/components/suez_water/sensor.py index e4e53dd7f6d..1152ebd551b 100644 --- a/homeassistant/components/suez_water/sensor.py +++ b/homeassistant/components/suez_water/sensor.py @@ -2,8 +2,8 @@ from __future__ import annotations -from collections.abc import Callable, Mapping -from dataclasses import dataclass +from collections.abc import Callable +from dataclasses import asdict, dataclass from typing import Any from pysuez.const import ATTRIBUTION @@ -28,7 +28,7 @@ class SuezWaterSensorEntityDescription(SensorEntityDescription): """Describes Suez water sensor entity.""" value_fn: Callable[[SuezWaterData], float | str | None] - attr_fn: Callable[[SuezWaterData], Mapping[str, Any] | None] = lambda _: None + attr_fn: Callable[[SuezWaterData], dict[str, Any] | None] = lambda _: None SENSORS: tuple[SuezWaterSensorEntityDescription, ...] = ( @@ -38,7 +38,7 @@ SENSORS: tuple[SuezWaterSensorEntityDescription, ...] 
= ( native_unit_of_measurement=UnitOfVolume.LITERS, device_class=SensorDeviceClass.WATER, value_fn=lambda suez_data: suez_data.aggregated_value, - attr_fn=lambda suez_data: suez_data.aggregated_attr, + attr_fn=lambda suez_data: asdict(suez_data.aggregated_attr), ), SuezWaterSensorEntityDescription( key="water_price", @@ -93,6 +93,6 @@ class SuezWaterSensor(CoordinatorEntity[SuezWaterCoordinator], SensorEntity): return self.entity_description.value_fn(self.coordinator.data) @property - def extra_state_attributes(self) -> Mapping[str, Any] | None: + def extra_state_attributes(self) -> dict[str, Any] | None: """Return extra state of the sensor.""" return self.entity_description.attr_fn(self.coordinator.data) diff --git a/tests/components/suez_water/conftest.py b/tests/components/suez_water/conftest.py index f634a053c65..b034d9b00fa 100644 --- a/tests/components/suez_water/conftest.py +++ b/tests/components/suez_water/conftest.py @@ -1,6 +1,7 @@ """Common fixtures for the Suez Water tests.""" from collections.abc import Generator +from datetime import date from unittest.mock import AsyncMock, patch from pysuez import AggregatedData, PriceResult @@ -56,22 +57,22 @@ def mock_suez_client() -> Generator[AsyncMock]: result = AggregatedData( value=160, current_month={ - "2024-01-01": 130, - "2024-01-02": 145, + date.fromisoformat("2024-01-01"): 130, + date.fromisoformat("2024-01-02"): 145, }, previous_month={ - "2024-12-01": 154, - "2024-12-02": 166, + date.fromisoformat("2024-12-01"): 154, + date.fromisoformat("2024-12-02"): 166, }, current_year=1500, previous_year=1000, attribution=ATTRIBUTION, highest_monthly_consumption=2558, history={ - "2024-01-01": 130, - "2024-01-02": 145, - "2024-12-01": 154, - "2024-12-02": 166, + date.fromisoformat("2024-01-01"): 130, + date.fromisoformat("2024-01-02"): 145, + date.fromisoformat("2024-12-01"): 154, + date.fromisoformat("2024-12-02"): 166, }, ) diff --git a/tests/components/suez_water/test_sensor.py 
b/tests/components/suez_water/test_sensor.py index cb578432f62..950d5d8393d 100644 --- a/tests/components/suez_water/test_sensor.py +++ b/tests/components/suez_water/test_sensor.py @@ -1,5 +1,6 @@ """Test Suez_water sensor platform.""" +from datetime import date from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory @@ -32,6 +33,13 @@ async def test_sensors_valid_state( assert mock_config_entry.state is ConfigEntryState.LOADED await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + state = hass.states.get("sensor.suez_mock_device_water_usage_yesterday") + assert state + previous: dict = state.attributes["previous_month_consumption"] + assert previous + assert previous.get(date.fromisoformat("2024-12-01")) is None + assert previous.get(str(date.fromisoformat("2024-12-01"))) == 154 + @pytest.mark.parametrize("method", [("fetch_aggregated_data"), ("get_price")]) async def test_sensors_failed_update( From 4ed0c21a4a605f23c18ee0dd24fb01437dec74b1 Mon Sep 17 00:00:00 2001 From: Mick Vleeshouwer Date: Sun, 22 Dec 2024 23:35:50 +0100 Subject: [PATCH 650/677] Add data descriptions to Config Flow in Overkiz (#133758) --- homeassistant/components/overkiz/strings.json | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/overkiz/strings.json b/homeassistant/components/overkiz/strings.json index 1595cd52aeb..0c564a003d6 100644 --- a/homeassistant/components/overkiz/strings.json +++ b/homeassistant/components/overkiz/strings.json @@ -6,12 +6,18 @@ "description": "Select your server. The Overkiz platform is used by various vendors like Somfy (Connexoon / TaHoma), Hitachi (Hi Kumo) and Atlantic (Cozytouch).", "data": { "hub": "Server" + }, + "data_description": { + "hub": "Select the mobile app that you use to control your devices." } }, "local_or_cloud": { - "description": "Choose between local or cloud API. 
Local API supports TaHoma Connexoon, TaHoma v2, and TaHoma Switch. Climate devices and scenarios are not supported in local API.", + "description": "Choose how you want to connect to your gateway.", "data": { "api_type": "API type" + }, + "data_description": { + "api_type": "Local API is only supported by TaHoma Connexoon, TaHoma v2, and TaHoma Switch. Climate devices and scenarios are **not** available via the local API." } }, "cloud": { @@ -19,6 +25,10 @@ "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "The username of your cloud account (app).", + "password": "The password of your cloud account (app)." } }, "local": { @@ -28,6 +38,12 @@ "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" + }, + "data_description": { + "host": "The hostname or IP address of your Overkiz hub.", + "username": "The username of your cloud account (app).", + "password": "The password of your cloud account (app).", + "verify_ssl": "Verify the SSL certificate. Select this only if you are connecting via the hostname." } } }, From 8ab936b87c9f83c69849d14ca00294a2094d40cf Mon Sep 17 00:00:00 2001 From: "Steven B." 
<51370195+sdb9696@users.noreply.github.com> Date: Sun, 22 Dec 2024 22:54:44 +0000 Subject: [PATCH 651/677] Add detection switches to tplink integration (#133828) --- homeassistant/components/tplink/icons.json | 24 +++ homeassistant/components/tplink/strings.json | 12 ++ homeassistant/components/tplink/switch.py | 12 ++ .../components/tplink/fixtures/features.json | 20 ++ .../tplink/snapshots/test_switch.ambr | 184 ++++++++++++++++++ 5 files changed, 252 insertions(+) diff --git a/homeassistant/components/tplink/icons.json b/homeassistant/components/tplink/icons.json index 3f3a3b1233b..9cc0326b59f 100644 --- a/homeassistant/components/tplink/icons.json +++ b/homeassistant/components/tplink/icons.json @@ -89,6 +89,30 @@ "state": { "on": "mdi:motion-sensor" } + }, + "motion_detection": { + "default": "mdi:motion-sensor-off", + "state": { + "on": "mdi:motion-sensor" + } + }, + "person_detection": { + "default": "mdi:account-off", + "state": { + "on": "mdi:account" + } + }, + "tamper_detection": { + "default": "mdi:shield-off", + "state": { + "on": "mdi:shield" + } + }, + "baby_cry_detection": { + "default": "mdi:baby-face-outline", + "state": { + "on": "mdi:baby-face" + } } }, "sensor": { diff --git a/homeassistant/components/tplink/strings.json b/homeassistant/components/tplink/strings.json index 5aa7c37d612..664d52c16af 100644 --- a/homeassistant/components/tplink/strings.json +++ b/homeassistant/components/tplink/strings.json @@ -233,6 +233,18 @@ }, "pir_enabled": { "name": "Motion sensor" + }, + "motion_detection": { + "name": "Motion detection" + }, + "person_detection": { + "name": "Person detection" + }, + "tamper_detection": { + "name": "Tamper detection" + }, + "baby_cry_detection": { + "name": "Baby cry detection" } }, "number": { diff --git a/homeassistant/components/tplink/switch.py b/homeassistant/components/tplink/switch.py index 7e223752665..28dedc7e7a1 100644 --- a/homeassistant/components/tplink/switch.py +++ b/homeassistant/components/tplink/switch.py 
@@ -54,6 +54,18 @@ SWITCH_DESCRIPTIONS: tuple[TPLinkSwitchEntityDescription, ...] = ( TPLinkSwitchEntityDescription( key="pir_enabled", ), + TPLinkSwitchEntityDescription( + key="motion_detection", + ), + TPLinkSwitchEntityDescription( + key="person_detection", + ), + TPLinkSwitchEntityDescription( + key="tamper_detection", + ), + TPLinkSwitchEntityDescription( + key="baby_cry_detection", + ), ) SWITCH_DESCRIPTIONS_MAP = {desc.key: desc for desc in SWITCH_DESCRIPTIONS} diff --git a/tests/components/tplink/fixtures/features.json b/tests/components/tplink/fixtures/features.json index a54edf56c62..3d27e63b06a 100644 --- a/tests/components/tplink/fixtures/features.json +++ b/tests/components/tplink/fixtures/features.json @@ -44,6 +44,26 @@ "type": "Switch", "category": "Config" }, + "motion_detection": { + "value": true, + "type": "Switch", + "category": "Primary" + }, + "person_detection": { + "value": true, + "type": "Switch", + "category": "Primary" + }, + "tamper_detection": { + "value": true, + "type": "Switch", + "category": "Primary" + }, + "baby_cry_detection": { + "value": true, + "type": "Switch", + "category": "Primary" + }, "current_consumption": { "value": 5.23, "type": "Sensor", diff --git a/tests/components/tplink/snapshots/test_switch.ambr b/tests/components/tplink/snapshots/test_switch.ambr index 36c630474c8..7adda900c02 100644 --- a/tests/components/tplink/snapshots/test_switch.ambr +++ b/tests/components/tplink/snapshots/test_switch.ambr @@ -173,6 +173,52 @@ 'state': 'on', }) # --- +# name: test_states[switch.my_device_baby_cry_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.my_device_baby_cry_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Baby cry detection', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'baby_cry_detection', + 'unique_id': '123456789ABCDEFGH_baby_cry_detection', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.my_device_baby_cry_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Baby cry detection', + }), + 'context': , + 'entity_id': 'switch.my_device_baby_cry_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_states[switch.my_device_child_lock-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -311,6 +357,52 @@ 'state': 'on', }) # --- +# name: test_states[switch.my_device_motion_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.my_device_motion_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Motion detection', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'motion_detection', + 'unique_id': '123456789ABCDEFGH_motion_detection', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.my_device_motion_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Motion detection', + }), + 'context': , + 'entity_id': 'switch.my_device_motion_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: 
test_states[switch.my_device_motion_sensor-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -357,6 +449,52 @@ 'state': 'on', }) # --- +# name: test_states[switch.my_device_person_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.my_device_person_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Person detection', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'person_detection', + 'unique_id': '123456789ABCDEFGH_person_detection', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.my_device_person_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Person detection', + }), + 'context': , + 'entity_id': 'switch.my_device_person_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_states[switch.my_device_smooth_transitions-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -403,3 +541,49 @@ 'state': 'on', }) # --- +# name: test_states[switch.my_device_tamper_detection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.my_device_tamper_detection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tamper 
detection', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tamper_detection', + 'unique_id': '123456789ABCDEFGH_tamper_detection', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.my_device_tamper_detection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Tamper detection', + }), + 'context': , + 'entity_id': 'switch.my_device_tamper_detection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- From df261660474dbdec780aa534fa0f764d1a2ba3c3 Mon Sep 17 00:00:00 2001 From: Raphael Hehl <7577984+RaHehl@users.noreply.github.com> Date: Sun, 22 Dec 2024 23:58:13 +0100 Subject: [PATCH 652/677] Unifiprotect: add error message if the get_user_keyring_info permissions are not sufficient (#133841) --- .../components/unifiprotect/services.py | 3 ++ .../components/unifiprotect/test_services.py | 31 +++++++++++++++++-- 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/unifiprotect/services.py b/homeassistant/components/unifiprotect/services.py index 6a1daef178e..402aae2eeba 100644 --- a/homeassistant/components/unifiprotect/services.py +++ b/homeassistant/components/unifiprotect/services.py @@ -236,6 +236,9 @@ async def get_user_keyring_info(call: ServiceCall) -> ServiceResponse: """Get the user keyring info.""" camera = _async_get_ufp_camera(call) ulp_users = camera.api.bootstrap.ulp_users.as_list() + if not ulp_users: + raise HomeAssistantError("No users found, please check Protect permissions.") + user_keyrings: list[JsonValueType] = [ { KEYRINGS_USER_FULL_NAME: user.full_name, diff --git a/tests/components/unifiprotect/test_services.py b/tests/components/unifiprotect/test_services.py index efc9d1ace9e..9697d1f11a4 100644 --- a/tests/components/unifiprotect/test_services.py +++ b/tests/components/unifiprotect/test_services.py @@ -262,13 +262,13 @@ async def test_remove_privacy_zone( 
@pytest.mark.asyncio
-async def test_get_doorbell_user(
+async def test_get_user_keyring_info(
     hass: HomeAssistant,
     entity_registry: er.EntityRegistry,
     ufp: MockUFPFixture,
     doorbell: Camera,
 ) -> None:
-    """Test get_doorbell_user service."""
+    """Test get_user_keyring_info service."""
 
     ulp_user = Mock(full_name="Test User", status="active", ulp_id="user_ulp_id")
 
     keyring = Mock(
@@ -315,3 +315,30 @@
         },
     ],
 }
+
+
+async def test_get_user_keyring_info_no_users(
+    hass: HomeAssistant,
+    entity_registry: er.EntityRegistry,
+    ufp: MockUFPFixture,
+    doorbell: Camera,
+) -> None:
+    """Test get_user_keyring_info service with no users."""
+
+    ufp.api.bootstrap.ulp_users.as_list = Mock(return_value=[])
+    ufp.api.bootstrap.keyrings.as_list = Mock(return_value=[])
+
+    await init_entry(hass, ufp, [doorbell])
+
+    camera_entry = entity_registry.async_get("binary_sensor.test_camera_doorbell")
+
+    with pytest.raises(
+        HomeAssistantError, match="No users found, please check Protect permissions."
+    ):
+        await hass.services.async_call(
+            DOMAIN,
+            SERVICE_GET_USER_KEYRING_INFO,
+            {ATTR_DEVICE_ID: camera_entry.device_id},
+            blocking=True,
+            return_response=True,
+        )
From 00a1ae0eeb83eeafd81c424391a28c2c5c788c6c Mon Sep 17 00:00:00 2001
From: "J.
Nick Koston" Date: Sun, 22 Dec 2024 12:58:39 -1000 Subject: [PATCH 653/677] Bump protobuf to 5.29.2 (#133839) --- homeassistant/package_constraints.txt | 2 +- requirements_test.txt | 2 +- script/gen_requirements_all.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index d5731041d08..b149c4dafb8 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -145,7 +145,7 @@ pyOpenSSL>=24.0.0 # protobuf must be in package constraints for the wheel # builder to build binary wheels -protobuf==5.28.3 +protobuf==5.29.2 # faust-cchardet: Ensure we have a version we can build wheels # 2.1.18 is the first version that works with our wheel builder diff --git a/requirements_test.txt b/requirements_test.txt index e8561eba0a5..2a6841ada2a 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -43,7 +43,7 @@ types-chardet==0.1.5 types-decorator==5.1.8.20240310 types-paho-mqtt==1.6.0.20240321 types-pillow==10.2.0.20240822 -types-protobuf==5.28.3.20241030 +types-protobuf==5.29.1.20241207 types-psutil==6.1.0.20241102 types-python-dateutil==2.9.0.20241003 types-python-slugify==8.0.2.20240310 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 71229d0b57d..c447c64f655 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -178,7 +178,7 @@ pyOpenSSL>=24.0.0 # protobuf must be in package constraints for the wheel # builder to build binary wheels -protobuf==5.28.3 +protobuf==5.29.2 # faust-cchardet: Ensure we have a version we can build wheels # 2.1.18 is the first version that works with our wheel builder From 353f0854748356697bd85d65aa0e8038cd7b4269 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 22 Dec 2024 13:05:51 -1000 Subject: [PATCH 654/677] Bump anyio to 4.7.0 (#133842) --- homeassistant/package_constraints.txt | 2 +- script/gen_requirements_all.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index b149c4dafb8..6863da50af3 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -106,7 +106,7 @@ uuid==1000000000.0.0 # these requirements are quite loose. As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these # requirements so we can directly link HA versions to these library versions. -anyio==4.6.2.post1 +anyio==4.7.0 h11==0.14.0 httpcore==1.0.5 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index c447c64f655..86179ac228f 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -139,7 +139,7 @@ uuid==1000000000.0.0 # these requirements are quite loose. As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these # requirements so we can directly link HA versions to these library versions. 
-anyio==4.6.2.post1 +anyio==4.7.0 h11==0.14.0 httpcore==1.0.5 From 67f0de441b489890efa802a325f187b761098ad6 Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Mon, 23 Dec 2024 00:06:01 +0100 Subject: [PATCH 655/677] Fulfill IQS rule runtime-data in ViCare integration (#133633) --- homeassistant/components/vicare/__init__.py | 63 ++++++------------- .../components/vicare/binary_sensor.py | 10 +-- homeassistant/components/vicare/button.py | 10 +-- homeassistant/components/vicare/climate.py | 12 ++-- .../components/vicare/config_flow.py | 8 +-- homeassistant/components/vicare/const.py | 2 +- .../components/vicare/diagnostics.py | 9 ++- homeassistant/components/vicare/fan.py | 11 +--- homeassistant/components/vicare/number.py | 15 ++--- .../components/vicare/quality_scale.yaml | 4 +- homeassistant/components/vicare/sensor.py | 11 +--- homeassistant/components/vicare/types.py | 13 ++++ homeassistant/components/vicare/utils.py | 40 ++++++++++-- .../components/vicare/water_heater.py | 10 +-- tests/components/vicare/conftest.py | 4 +- tests/components/vicare/test_binary_sensor.py | 2 +- tests/components/vicare/test_button.py | 2 +- tests/components/vicare/test_climate.py | 2 +- tests/components/vicare/test_config_flow.py | 12 ++-- tests/components/vicare/test_fan.py | 2 +- tests/components/vicare/test_init.py | 2 +- tests/components/vicare/test_number.py | 2 +- tests/components/vicare/test_sensor.py | 4 +- tests/components/vicare/test_water_heater.py | 2 +- 24 files changed, 121 insertions(+), 131 deletions(-) diff --git a/homeassistant/components/vicare/__init__.py b/homeassistant/components/vicare/__init__.py index d6b9e4b923a..9c331f0e9ec 100644 --- a/homeassistant/components/vicare/__init__.py +++ b/homeassistant/components/vicare/__init__.py @@ -2,11 +2,9 @@ from __future__ import annotations -from collections.abc import Mapping from contextlib import suppress import logging import os -from typing import Any from 
PyViCare.PyViCare import PyViCare from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig @@ -16,8 +14,6 @@ from PyViCare.PyViCareUtils import ( ) from homeassistant.components.climate import DOMAIN as DOMAIN_CLIMATE -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -25,31 +21,28 @@ from homeassistant.helpers.storage import STORAGE_DIR from .const import ( DEFAULT_CACHE_DURATION, - DEVICE_LIST, DOMAIN, PLATFORMS, UNSUPPORTED_DEVICES, + VICARE_TOKEN_FILENAME, ) -from .types import ViCareDevice -from .utils import get_device, get_device_serial +from .types import ViCareConfigEntry, ViCareData, ViCareDevice +from .utils import get_device, get_device_serial, login _LOGGER = logging.getLogger(__name__) -_TOKEN_FILENAME = "vicare_token.save" -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: ViCareConfigEntry) -> bool: """Set up from config entry.""" _LOGGER.debug("Setting up ViCare component") - - hass.data[DOMAIN] = {} - hass.data[DOMAIN][entry.entry_id] = {} - try: - await hass.async_add_executor_job(setup_vicare_api, hass, entry) + entry.runtime_data = await hass.async_add_executor_job( + setup_vicare_api, hass, entry + ) except (PyViCareInvalidConfigurationError, PyViCareInvalidCredentialsError) as err: raise ConfigEntryAuthFailed("Authentication failed") from err - for device in hass.data[DOMAIN][entry.entry_id][DEVICE_LIST]: + for device in entry.runtime_data.devices: # Migration can be removed in 2025.4.0 await async_migrate_devices_and_entities(hass, entry, device) @@ -58,28 +51,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -def vicare_login( - hass: 
HomeAssistant, - entry_data: Mapping[str, Any], - cache_duration=DEFAULT_CACHE_DURATION, -) -> PyViCare: - """Login via PyVicare API.""" - vicare_api = PyViCare() - vicare_api.setCacheDuration(cache_duration) - vicare_api.initWithCredentials( - entry_data[CONF_USERNAME], - entry_data[CONF_PASSWORD], - entry_data[CONF_CLIENT_ID], - hass.config.path(STORAGE_DIR, _TOKEN_FILENAME), - ) - return vicare_api - - -def setup_vicare_api(hass: HomeAssistant, entry: ConfigEntry) -> None: +def setup_vicare_api(hass: HomeAssistant, entry: ViCareConfigEntry) -> PyViCare: """Set up PyVicare API.""" - vicare_api = vicare_login(hass, entry.data) + client = login(hass, entry.data) - device_config_list = get_supported_devices(vicare_api.devices) + device_config_list = get_supported_devices(client.devices) + + # increase cache duration to fit rate limit to number of devices if (number_of_devices := len(device_config_list)) > 1: cache_duration = DEFAULT_CACHE_DURATION * number_of_devices _LOGGER.debug( @@ -87,36 +65,35 @@ def setup_vicare_api(hass: HomeAssistant, entry: ConfigEntry) -> None: number_of_devices, cache_duration, ) - vicare_api = vicare_login(hass, entry.data, cache_duration) - device_config_list = get_supported_devices(vicare_api.devices) + client = login(hass, entry.data, cache_duration) + device_config_list = get_supported_devices(client.devices) for device in device_config_list: _LOGGER.debug( "Found device: %s (online: %s)", device.getModel(), str(device.isOnline()) ) - hass.data[DOMAIN][entry.entry_id][DEVICE_LIST] = [ + devices = [ ViCareDevice(config=device_config, api=get_device(entry, device_config)) for device_config in device_config_list ] + return ViCareData(client=client, devices=devices) -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: ViCareConfigEntry) -> bool: """Unload ViCare config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, 
PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) with suppress(FileNotFoundError): await hass.async_add_executor_job( - os.remove, hass.config.path(STORAGE_DIR, _TOKEN_FILENAME) + os.remove, hass.config.path(STORAGE_DIR, VICARE_TOKEN_FILENAME) ) return unload_ok async def async_migrate_devices_and_entities( - hass: HomeAssistant, entry: ConfigEntry, device: ViCareDevice + hass: HomeAssistant, entry: ViCareConfigEntry, device: ViCareDevice ) -> None: """Migrate old entry.""" device_registry = dr.async_get(hass) diff --git a/homeassistant/components/vicare/binary_sensor.py b/homeassistant/components/vicare/binary_sensor.py index 55f0ab96ed0..ced02dae97e 100644 --- a/homeassistant/components/vicare/binary_sensor.py +++ b/homeassistant/components/vicare/binary_sensor.py @@ -24,13 +24,11 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity -from .types import ViCareDevice, ViCareRequiredKeysMixin +from .types import ViCareConfigEntry, ViCareDevice, ViCareRequiredKeysMixin from .utils import ( get_burners, get_circuits, @@ -152,16 +150,14 @@ def _build_entities( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ViCareConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Create the ViCare binary sensor devices.""" - device_list = hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] - async_add_entities( await hass.async_add_executor_job( _build_entities, - device_list, + config_entry.runtime_data.devices, ) ) diff --git a/homeassistant/components/vicare/button.py b/homeassistant/components/vicare/button.py index 49d142c1edb..ad7d600eba3 100644 --- a/homeassistant/components/vicare/button.py +++ 
b/homeassistant/components/vicare/button.py @@ -16,14 +16,12 @@ from PyViCare.PyViCareUtils import ( import requests from homeassistant.components.button import ButtonEntity, ButtonEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity -from .types import ViCareDevice, ViCareRequiredKeysMixinWithSet +from .types import ViCareConfigEntry, ViCareDevice, ViCareRequiredKeysMixinWithSet from .utils import get_device_serial, is_supported _LOGGER = logging.getLogger(__name__) @@ -67,16 +65,14 @@ def _build_entities( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ViCareConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Create the ViCare button entities.""" - device_list = hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] - async_add_entities( await hass.async_add_executor_job( _build_entities, - device_list, + config_entry.runtime_data.devices, ) ) diff --git a/homeassistant/components/vicare/climate.py b/homeassistant/components/vicare/climate.py index 67330bf201d..62231a4e2fe 100644 --- a/homeassistant/components/vicare/climate.py +++ b/homeassistant/components/vicare/climate.py @@ -24,7 +24,6 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_TEMPERATURE, PRECISION_TENTHS, @@ -37,9 +36,9 @@ from homeassistant.helpers import entity_platform import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DEVICE_LIST, DOMAIN +from .const import DOMAIN from .entity import ViCareEntity -from .types import HeatingProgram, ViCareDevice +from .types import HeatingProgram, 
ViCareConfigEntry, ViCareDevice from .utils import get_burners, get_circuits, get_compressors, get_device_serial _LOGGER = logging.getLogger(__name__) @@ -99,25 +98,22 @@ def _build_entities( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ViCareConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the ViCare climate platform.""" platform = entity_platform.async_get_current_platform() - platform.async_register_entity_service( SERVICE_SET_VICARE_MODE, {vol.Required(SERVICE_SET_VICARE_MODE_ATTR_MODE): cv.string}, "set_vicare_mode", ) - device_list = hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] - async_add_entities( await hass.async_add_executor_job( _build_entities, - device_list, + config_entry.runtime_data.devices, ) ) diff --git a/homeassistant/components/vicare/config_flow.py b/homeassistant/components/vicare/config_flow.py index c711cc06074..6594e6ec9e4 100644 --- a/homeassistant/components/vicare/config_flow.py +++ b/homeassistant/components/vicare/config_flow.py @@ -18,7 +18,6 @@ from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import format_mac -from . 
import vicare_login from .const import ( CONF_HEATING_TYPE, DEFAULT_HEATING_TYPE, @@ -26,6 +25,7 @@ from .const import ( VICARE_NAME, HeatingType, ) +from .utils import login _LOGGER = logging.getLogger(__name__) @@ -62,9 +62,7 @@ class ViCareConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: try: - await self.hass.async_add_executor_job( - vicare_login, self.hass, user_input - ) + await self.hass.async_add_executor_job(login, self.hass, user_input) except (PyViCareInvalidConfigurationError, PyViCareInvalidCredentialsError): errors["base"] = "invalid_auth" else: @@ -96,7 +94,7 @@ class ViCareConfigFlow(ConfigFlow, domain=DOMAIN): } try: - await self.hass.async_add_executor_job(vicare_login, self.hass, data) + await self.hass.async_add_executor_job(login, self.hass, data) except (PyViCareInvalidConfigurationError, PyViCareInvalidCredentialsError): errors["base"] = "invalid_auth" else: diff --git a/homeassistant/components/vicare/const.py b/homeassistant/components/vicare/const.py index 828a879927d..bcf41223d3f 100644 --- a/homeassistant/components/vicare/const.py +++ b/homeassistant/components/vicare/const.py @@ -25,8 +25,8 @@ UNSUPPORTED_DEVICES = [ "E3_RoomControl_One_522", ] -DEVICE_LIST = "device_list" VICARE_NAME = "ViCare" +VICARE_TOKEN_FILENAME = "vicare_token.save" CONF_CIRCUIT = "circuit" CONF_HEATING_TYPE = "heating_type" diff --git a/homeassistant/components/vicare/diagnostics.py b/homeassistant/components/vicare/diagnostics.py index 9182e96509f..7695c304451 100644 --- a/homeassistant/components/vicare/diagnostics.py +++ b/homeassistant/components/vicare/diagnostics.py @@ -6,25 +6,24 @@ import json from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant -from .const import DEVICE_LIST, DOMAIN +from .types import ViCareConfigEntry 
TO_REDACT = {CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME} async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: ViCareConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" def dump_devices() -> list[dict[str, Any]]: """Dump devices.""" return [ - json.loads(device.config.dump_secure()) - for device in hass.data[DOMAIN][entry.entry_id][DEVICE_LIST] + json.loads(device.dump_secure()) + for device in entry.runtime_data.client.devices ] return { diff --git a/homeassistant/components/vicare/fan.py b/homeassistant/components/vicare/fan.py index 6e8513a1f7e..69aa8396fea 100644 --- a/homeassistant/components/vicare/fan.py +++ b/homeassistant/components/vicare/fan.py @@ -19,7 +19,6 @@ from PyViCare.PyViCareVentilationDevice import ( from requests.exceptions import ConnectionError as RequestConnectionError from homeassistant.components.fan import FanEntity, FanEntityFeature -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.percentage import ( @@ -27,9 +26,8 @@ from homeassistant.util.percentage import ( percentage_to_ordered_list_item, ) -from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity -from .types import ViCareDevice +from .types import ViCareConfigEntry, ViCareDevice from .utils import get_device_serial _LOGGER = logging.getLogger(__name__) @@ -104,17 +102,14 @@ def _build_entities( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ViCareConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the ViCare fan platform.""" - - device_list = hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] - async_add_entities( await hass.async_add_executor_job( _build_entities, - device_list, + config_entry.runtime_data.devices, ) ) diff --git 
a/homeassistant/components/vicare/number.py b/homeassistant/components/vicare/number.py index f9af9636941..8ffaa727634 100644 --- a/homeassistant/components/vicare/number.py +++ b/homeassistant/components/vicare/number.py @@ -25,14 +25,17 @@ from homeassistant.components.number import ( NumberEntity, NumberEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity -from .types import HeatingProgram, ViCareDevice, ViCareRequiredKeysMixin +from .types import ( + HeatingProgram, + ViCareConfigEntry, + ViCareDevice, + ViCareRequiredKeysMixin, +) from .utils import get_circuits, get_device_serial, is_supported _LOGGER = logging.getLogger(__name__) @@ -370,16 +373,14 @@ def _build_entities( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ViCareConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Create the ViCare number devices.""" - device_list = hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] - async_add_entities( await hass.async_add_executor_job( _build_entities, - device_list, + config_entry.runtime_data.devices, ) ) diff --git a/homeassistant/components/vicare/quality_scale.yaml b/homeassistant/components/vicare/quality_scale.yaml index 959e2e90583..35a1e7b0adb 100644 --- a/homeassistant/components/vicare/quality_scale.yaml +++ b/homeassistant/components/vicare/quality_scale.yaml @@ -6,9 +6,7 @@ rules: status: todo comment: Uniqueness is not checked yet. config-flow-test-coverage: done - runtime-data: - status: todo - comment: runtime_data is not used yet. 
+ runtime-data: done test-before-setup: done appropriate-polling: done entity-unique-id: done diff --git a/homeassistant/components/vicare/sensor.py b/homeassistant/components/vicare/sensor.py index 57b7c0bec9a..3386c849f74 100644 --- a/homeassistant/components/vicare/sensor.py +++ b/homeassistant/components/vicare/sensor.py @@ -25,7 +25,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, EntityCategory, @@ -40,8 +39,6 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import ( - DEVICE_LIST, - DOMAIN, VICARE_CUBIC_METER, VICARE_KW, VICARE_KWH, @@ -50,7 +47,7 @@ from .const import ( VICARE_WH, ) from .entity import ViCareEntity -from .types import ViCareDevice, ViCareRequiredKeysMixin +from .types import ViCareConfigEntry, ViCareDevice, ViCareRequiredKeysMixin from .utils import ( get_burners, get_circuits, @@ -968,16 +965,14 @@ def _build_entities( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ViCareConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Create the ViCare sensor devices.""" - device_list = hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] - async_add_entities( await hass.async_add_executor_job( _build_entities, - device_list, + config_entry.runtime_data.devices, ), # run update to have device_class set depending on unit_of_measurement True, diff --git a/homeassistant/components/vicare/types.py b/homeassistant/components/vicare/types.py index 98d1c0566ce..65ae2a53c3e 100644 --- a/homeassistant/components/vicare/types.py +++ b/homeassistant/components/vicare/types.py @@ -6,6 +6,7 @@ from dataclasses import dataclass import enum from typing import Any +from PyViCare.PyViCare import PyViCare from PyViCare.PyViCareDevice import Device as PyViCareDevice from 
PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig @@ -15,6 +16,7 @@ from homeassistant.components.climate import ( PRESET_HOME, PRESET_SLEEP, ) +from homeassistant.config_entries import ConfigEntry class HeatingProgram(enum.StrEnum): @@ -80,6 +82,17 @@ class ViCareDevice: api: PyViCareDevice +@dataclass(frozen=True) +class ViCareData: + """ViCare data class.""" + + client: PyViCare + devices: list[ViCareDevice] + + +type ViCareConfigEntry = ConfigEntry[ViCareData] + + @dataclass(frozen=True) class ViCareRequiredKeysMixin: """Mixin for required keys.""" diff --git a/homeassistant/components/vicare/utils.py b/homeassistant/components/vicare/utils.py index 5156ea4a41e..120dad83113 100644 --- a/homeassistant/components/vicare/utils.py +++ b/homeassistant/components/vicare/utils.py @@ -1,7 +1,12 @@ """ViCare helpers functions.""" -import logging +from __future__ import annotations +from collections.abc import Mapping +import logging +from typing import Any + +from PyViCare.PyViCare import PyViCare from PyViCare.PyViCareDevice import Device as PyViCareDevice from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig from PyViCare.PyViCareHeatingDevice import ( @@ -14,16 +19,41 @@ from PyViCare.PyViCareUtils import ( ) import requests -from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.helpers.storage import STORAGE_DIR -from .const import CONF_HEATING_TYPE, HEATING_TYPE_TO_CREATOR_METHOD, HeatingType -from .types import ViCareRequiredKeysMixin +from .const import ( + CONF_HEATING_TYPE, + DEFAULT_CACHE_DURATION, + HEATING_TYPE_TO_CREATOR_METHOD, + VICARE_TOKEN_FILENAME, + HeatingType, +) +from .types import ViCareConfigEntry, ViCareRequiredKeysMixin _LOGGER = logging.getLogger(__name__) +def login( + hass: HomeAssistant, + entry_data: Mapping[str, Any], + cache_duration=DEFAULT_CACHE_DURATION, +) -> PyViCare: + 
"""Login via PyVicare API.""" + vicare_api = PyViCare() + vicare_api.setCacheDuration(cache_duration) + vicare_api.initWithCredentials( + entry_data[CONF_USERNAME], + entry_data[CONF_PASSWORD], + entry_data[CONF_CLIENT_ID], + hass.config.path(STORAGE_DIR, VICARE_TOKEN_FILENAME), + ) + return vicare_api + + def get_device( - entry: ConfigEntry, device_config: PyViCareDeviceConfig + entry: ViCareConfigEntry, device_config: PyViCareDeviceConfig ) -> PyViCareDevice: """Get device for device config.""" return getattr( diff --git a/homeassistant/components/vicare/water_heater.py b/homeassistant/components/vicare/water_heater.py index 5e241c9a3be..114ff620c3f 100644 --- a/homeassistant/components/vicare/water_heater.py +++ b/homeassistant/components/vicare/water_heater.py @@ -20,14 +20,12 @@ from homeassistant.components.water_heater import ( WaterHeaterEntity, WaterHeaterEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, PRECISION_TENTHS, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity -from .types import ViCareDevice +from .types import ViCareConfigEntry, ViCareDevice from .utils import get_circuits, get_device_serial _LOGGER = logging.getLogger(__name__) @@ -81,16 +79,14 @@ def _build_entities( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ViCareConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the ViCare water heater platform.""" - device_list = hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] - async_add_entities( await hass.async_add_executor_job( _build_entities, - device_list, + config_entry.runtime_data.devices, ) ) diff --git a/tests/components/vicare/conftest.py b/tests/components/vicare/conftest.py index aadf85e7081..8e10d2f1a25 100644 --- 
a/tests/components/vicare/conftest.py +++ b/tests/components/vicare/conftest.py @@ -84,7 +84,7 @@ async def mock_vicare_gas_boiler( """Return a mocked ViCare API representing a single gas boiler device.""" fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] with patch( - f"{MODULE}.vicare_login", + f"{MODULE}.login", return_value=MockPyViCare(fixtures), ): await setup_integration(hass, mock_config_entry) @@ -102,7 +102,7 @@ async def mock_vicare_room_sensors( Fixture({"type:climateSensor"}, "vicare/RoomSensor2.json"), ] with patch( - f"{MODULE}.vicare_login", + f"{MODULE}.login", return_value=MockPyViCare(fixtures), ): await setup_integration(hass, mock_config_entry) diff --git a/tests/components/vicare/test_binary_sensor.py b/tests/components/vicare/test_binary_sensor.py index b9b8a57a59b..44612673a11 100644 --- a/tests/components/vicare/test_binary_sensor.py +++ b/tests/components/vicare/test_binary_sensor.py @@ -43,7 +43,7 @@ async def test_all_entities( """Test all entities.""" fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.BINARY_SENSOR]), ): await setup_integration(hass, mock_config_entry) diff --git a/tests/components/vicare/test_button.py b/tests/components/vicare/test_button.py index c024af41d78..cdc47e3833d 100644 --- a/tests/components/vicare/test_button.py +++ b/tests/components/vicare/test_button.py @@ -25,7 +25,7 @@ async def test_all_entities( """Test all entities.""" fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.BUTTON]), ): await setup_integration(hass, mock_config_entry) diff --git 
a/tests/components/vicare/test_climate.py b/tests/components/vicare/test_climate.py index 44df87276e7..f48a8988cf0 100644 --- a/tests/components/vicare/test_climate.py +++ b/tests/components/vicare/test_climate.py @@ -25,7 +25,7 @@ async def test_all_entities( """Test all entities.""" fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.CLIMATE]), ): await setup_integration(hass, mock_config_entry) diff --git a/tests/components/vicare/test_config_flow.py b/tests/components/vicare/test_config_flow.py index a522cf75d5d..d44fd1b9fed 100644 --- a/tests/components/vicare/test_config_flow.py +++ b/tests/components/vicare/test_config_flow.py @@ -49,7 +49,7 @@ async def test_user_create_entry( # test PyViCareInvalidConfigurationError with patch( - f"{MODULE}.config_flow.vicare_login", + f"{MODULE}.config_flow.login", side_effect=PyViCareInvalidConfigurationError( {"error": "foo", "error_description": "bar"} ), @@ -65,7 +65,7 @@ async def test_user_create_entry( # test PyViCareInvalidCredentialsError with patch( - f"{MODULE}.config_flow.vicare_login", + f"{MODULE}.config_flow.login", side_effect=PyViCareInvalidCredentialsError, ): result = await hass.config_entries.flow.async_configure( @@ -79,7 +79,7 @@ async def test_user_create_entry( # test success with patch( - f"{MODULE}.config_flow.vicare_login", + f"{MODULE}.config_flow.login", return_value=None, ): result = await hass.config_entries.flow.async_configure( @@ -110,7 +110,7 @@ async def test_step_reauth(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> # test PyViCareInvalidConfigurationError with patch( - f"{MODULE}.config_flow.vicare_login", + f"{MODULE}.config_flow.login", side_effect=PyViCareInvalidConfigurationError( {"error": "foo", "error_description": "bar"} ), @@ -125,7 +125,7 @@ async def 
test_step_reauth(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> # test success with patch( - f"{MODULE}.config_flow.vicare_login", + f"{MODULE}.config_flow.login", return_value=None, ): result = await hass.config_entries.flow.async_configure( @@ -160,7 +160,7 @@ async def test_form_dhcp( assert result["errors"] == {} with patch( - f"{MODULE}.config_flow.vicare_login", + f"{MODULE}.config_flow.login", return_value=None, ): result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/vicare/test_fan.py b/tests/components/vicare/test_fan.py index ba5db6e42c7..aaf6a968ffd 100644 --- a/tests/components/vicare/test_fan.py +++ b/tests/components/vicare/test_fan.py @@ -25,7 +25,7 @@ async def test_all_entities( """Test all entities.""" fixtures: list[Fixture] = [Fixture({"type:ventilation"}, "vicare/ViAir300F.json")] with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.FAN]), ): await setup_integration(hass, mock_config_entry) diff --git a/tests/components/vicare/test_init.py b/tests/components/vicare/test_init.py index 62bec7f50c5..d553f2758b8 100644 --- a/tests/components/vicare/test_init.py +++ b/tests/components/vicare/test_init.py @@ -26,7 +26,7 @@ async def test_device_and_entity_migration( Fixture({"type:boiler"}, "vicare/dummy-device-no-serial.json"), ] with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.CLIMATE]), ): mock_config_entry.add_to_hass(hass) diff --git a/tests/components/vicare/test_number.py b/tests/components/vicare/test_number.py index c3aa66a86f6..7b9c1915b95 100644 --- a/tests/components/vicare/test_number.py +++ b/tests/components/vicare/test_number.py @@ -25,7 +25,7 @@ async def test_all_entities( """Test all entities.""" fixtures: list[Fixture] = 
[Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.NUMBER]), ): await setup_integration(hass, mock_config_entry) diff --git a/tests/components/vicare/test_sensor.py b/tests/components/vicare/test_sensor.py index 06c8b963680..afd3232478a 100644 --- a/tests/components/vicare/test_sensor.py +++ b/tests/components/vicare/test_sensor.py @@ -27,7 +27,7 @@ async def test_all_entities( Fixture({"type:boiler"}, "vicare/Vitodens300W.json"), ] with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.SENSOR]), ): await setup_integration(hass, mock_config_entry) @@ -48,7 +48,7 @@ async def test_room_sensors( Fixture({"type:climateSensor"}, "vicare/RoomSensor2.json"), ] with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.SENSOR]), ): await setup_integration(hass, mock_config_entry) diff --git a/tests/components/vicare/test_water_heater.py b/tests/components/vicare/test_water_heater.py index fbb5863cf7a..f9ca431af6d 100644 --- a/tests/components/vicare/test_water_heater.py +++ b/tests/components/vicare/test_water_heater.py @@ -25,7 +25,7 @@ async def test_all_entities( """Test all entities.""" fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] with ( - patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.login", return_value=MockPyViCare(fixtures)), patch(f"{MODULE}.PLATFORMS", [Platform.WATER_HEATER]), ): await setup_integration(hass, mock_config_entry) From 29fa40a5cf276509160f3564d920a54c02294d76 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 22 Dec 2024 18:07:05 -1000 Subject: [PATCH 656/677] Add backup the list of integrations platforms to preload (#133856) `backup` is now at the top of the startup time list. This will help reduce it. --- homeassistant/loader.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/loader.py b/homeassistant/loader.py index 1fa9d0cd49d..78c89b94765 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -78,6 +78,7 @@ BASE_PRELOAD_PLATFORMS = [ "repairs", "system_health", "trigger", + "backup", ] From de1b6a0dfcd2b679d020f8163db9c83ffb6c8bf0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 22 Dec 2024 18:17:13 -1000 Subject: [PATCH 657/677] Add backup to the list of storage preloads (#133855) --- homeassistant/bootstrap.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py index 1034223051c..78c7d91fae0 100644 --- a/homeassistant/bootstrap.py +++ b/homeassistant/bootstrap.py @@ -252,6 +252,7 @@ PRELOAD_STORAGE = [ "assist_pipeline.pipelines", "core.analytics", "auth_module.totp", + "backup", ] From dcc9be02ca8cf5b024b7ec79e60e2504d941692c Mon Sep 17 00:00:00 2001 From: TheJulianJES Date: Mon, 23 Dec 2024 05:19:05 +0100 Subject: [PATCH 658/677] Bump ZHA to 0.0.43 (#133854) * Bump ZHA to 0.0.43 * Add strings for v2 quirk entities --- homeassistant/components/zha/manifest.json | 2 +- homeassistant/components/zha/strings.json | 108 +++++++++++++++++++++ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 111 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index 3a301be9b02..e396c8776e7 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.25", "zha==0.0.42"], + "requirements": 
["universal-silabs-flasher==0.0.25", "zha==0.0.43"], "usb": [ { "vid": "10C4", diff --git a/homeassistant/components/zha/strings.json b/homeassistant/components/zha/strings.json index 4706e204872..8e4d3f78eb4 100644 --- a/homeassistant/components/zha/strings.json +++ b/homeassistant/components/zha/strings.json @@ -586,6 +586,12 @@ }, "preheat_status": { "name": "Pre-heat status" + }, + "open_window_detection_status": { + "name": "Open window detection status" + }, + "window_detection": { + "name": "Open window detection" } }, "button": { @@ -822,6 +828,57 @@ }, "approach_distance": { "name": "Approach distance" + }, + "fixed_load_demand": { + "name": "Fixed load demand" + }, + "display_brightness": { + "name": "Display brightness" + }, + "display_inactive_brightness": { + "name": "Display inactive brightness" + }, + "display_activity_timeout": { + "name": "Display activity timeout" + }, + "open_window_detection_threshold": { + "name": "Open window detection threshold" + }, + "open_window_event_duration": { + "name": "Open window event duration" + }, + "open_window_detection_guard_period": { + "name": "Open window detection guard period" + }, + "fallback_timeout": { + "name": "Fallback timeout" + }, + "boost_amount": { + "name": "Boost amount" + }, + "ambient_sensor_correction": { + "name": "Ambient sensor correction" + }, + "external_sensor_correction": { + "name": "External sensor correction" + }, + "move_sensitivity": { + "name": "Motion sensitivity" + }, + "detection_distance_min": { + "name": "Minimum range" + }, + "detection_distance_max": { + "name": "Maximum range" + }, + "presence_sensitivity": { + "name": "Presence sensitivity" + }, + "presence_timeout": { + "name": "Fade time" + }, + "regulator_set_point": { + "name": "Regulator set point" } }, "select": { @@ -926,6 +983,45 @@ }, "external_trigger_mode": { "name": "External trigger mode" + }, + "local_temperature_source": { + "name": "Local temperature source" + }, + "control_type": { + "name": "Control 
type" + }, + "thermostat_application": { + "name": "Thermostat application" + }, + "heating_fuel": { + "name": "Heating fuel" + }, + "heat_transfer_medium": { + "name": "Heat transfer medium" + }, + "heating_emitter_type": { + "name": "Heating emitter type" + }, + "external_temperature_sensor_type": { + "name": "External temperature sensor type" + }, + "preset_mode": { + "name": "Preset mode" + }, + "sensor_mode": { + "name": "Sensor mode" + }, + "thermostat_mode": { + "name": "Thermostat mode" + }, + "regulator_period": { + "name": "Regulator period" + }, + "click_mode": { + "name": "Click mode" + }, + "operation_mode": { + "name": "Operation mode" } }, "sensor": { @@ -1132,6 +1228,15 @@ }, "motion_distance": { "name": "Motion distance" + }, + "control_status": { + "name": "Control status" + }, + "distance": { + "name": "Target distance" + }, + "local_temperature_floor": { + "name": "Floor temperature" } }, "switch": { @@ -1257,6 +1362,9 @@ }, "enable_siren": { "name": "Enable siren" + }, + "find_switch": { + "name": "Distance switch" } } } diff --git a/requirements_all.txt b/requirements_all.txt index a02fe7f33ff..b194f249770 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3097,7 +3097,7 @@ zeroconf==0.136.2 zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.42 +zha==0.0.43 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.13 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index bbf04fbf2d6..2576bdeedf7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2486,7 +2486,7 @@ zeroconf==0.136.2 zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.42 +zha==0.0.43 # homeassistant.components.zwave_js zwave-js-server-python==0.60.0 From 3658cdba4c865dc21977281f871edd35a024aed2 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 22 Dec 2024 18:19:44 -1000 Subject: [PATCH 659/677] Ensure late import in backup of hassio.backup does not block the event loop (#133857) * Ensure late import in backup of components.hassio.backup does not block the event loop Preload backup when loading hassio to ensure it happens in the executor https://github.com/home-assistant/core/blob/67f0de441b489890efa802a325f187b761098ad6/homeassistant/components/backup/__init__.py#L57 * improve comment --- homeassistant/components/hassio/__init__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/homeassistant/components/hassio/__init__.py b/homeassistant/components/hassio/__init__.py index a2a9d8ff028..fec84737e78 100644 --- a/homeassistant/components/hassio/__init__.py +++ b/homeassistant/components/hassio/__init__.py @@ -64,7 +64,10 @@ from homeassistant.util.dt import now # config_flow, diagnostics, system_health, and entity platforms are imported to # ensure other dependencies that wait for hassio are not waiting # for hassio to import its platforms +# backup is pre-imported to ensure that the backup integration does not load +# it from the event loop from . import ( # noqa: F401 + backup, binary_sensor, config_flow, diagnostics, From cf45c670556d4d61b022b171f42ca3a6226f8747 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Mon, 23 Dec 2024 05:26:11 +0100 Subject: [PATCH 660/677] Fix TypeError in maxcube climate action inference logic (#133853) The maxcube-api library initializes the valve_position as a None value, so that during initialization if the cube does not respond quickly enough the comparison fails to compare a None-Type to an integer. 
--- homeassistant/components/maxcube/climate.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/maxcube/climate.py b/homeassistant/components/maxcube/climate.py index da5a9f34dda..296da4f0ab4 100644 --- a/homeassistant/components/maxcube/climate.py +++ b/homeassistant/components/maxcube/climate.py @@ -171,8 +171,8 @@ class MaxCubeClimate(ClimateEntity): else: return None - # Assume heating when valve is open - if valve > 0: + # Assume heating when valve is open. + if valve: return HVACAction.HEATING return HVACAction.OFF if self.hvac_mode == HVACMode.OFF else HVACAction.IDLE From 6cdbdadc244f9257db5c5379ccf3032013ec5ec7 Mon Sep 17 00:00:00 2001 From: "Teemu R." Date: Mon, 23 Dec 2024 06:38:10 +0100 Subject: [PATCH 661/677] Ignore devices (bravias) with 'video' service_type for songpal discovery (#133724) --- homeassistant/components/songpal/config_flow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/songpal/config_flow.py b/homeassistant/components/songpal/config_flow.py index 41cc0763642..1c13013108f 100644 --- a/homeassistant/components/songpal/config_flow.py +++ b/homeassistant/components/songpal/config_flow.py @@ -116,7 +116,7 @@ class SongpalConfigFlow(ConfigFlow, domain=DOMAIN): ] # Ignore Bravia TVs - if "videoScreen" in service_types: + if "videoScreen" in service_types or "video" in service_types: return self.async_abort(reason="not_songpal_device") if TYPE_CHECKING: From ad0ee8f2d6cddaff48544b51f32d5d41df2d2781 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 23 Dec 2024 08:18:23 +0100 Subject: [PATCH 662/677] Bump github/codeql-action from 3.27.9 to 3.28.0 (#133862) --- .github/workflows/codeql.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index d3efa8ebaa3..511ec963db3 100644 --- a/.github/workflows/codeql.yml +++ 
b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.2.2 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.27.9 + uses: github/codeql-action/init@v3.28.0 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.27.9 + uses: github/codeql-action/analyze@v3.28.0 with: category: "/language:python" From 4321d27ed349583b2c4e18763dd51286010c8c0f Mon Sep 17 00:00:00 2001 From: jon6fingrs <53415122+jon6fingrs@users.noreply.github.com> Date: Mon, 23 Dec 2024 02:39:43 -0500 Subject: [PATCH 663/677] Ensure icalendar==6.1.0 is installed for caldav integration (#133541) --- homeassistant/components/caldav/manifest.json | 2 +- requirements_all.txt | 3 +++ requirements_test_all.txt | 3 +++ 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/caldav/manifest.json b/homeassistant/components/caldav/manifest.json index e0d598e6493..5c1334c8029 100644 --- a/homeassistant/components/caldav/manifest.json +++ b/homeassistant/components/caldav/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/caldav", "iot_class": "cloud_polling", "loggers": ["caldav", "vobject"], - "requirements": ["caldav==1.3.9"] + "requirements": ["caldav==1.3.9", "icalendar==6.1.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index b194f249770..65d9ca63667 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1177,6 +1177,9 @@ ibmiotf==0.3.4 # homeassistant.components.local_todo ical==8.2.0 +# homeassistant.components.caldav +icalendar==6.1.0 + # homeassistant.components.ping icmplib==3.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2576bdeedf7..b4d0fbf0432 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -997,6 +997,9 @@ ibeacon-ble==1.2.0 # homeassistant.components.local_todo ical==8.2.0 +# homeassistant.components.caldav +icalendar==6.1.0 + # homeassistant.components.ping 
icmplib==3.0 From ddb3edca5dd2dd56435d6377e6381478e9fcc75f Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 22 Dec 2024 21:44:01 -1000 Subject: [PATCH 664/677] Bump PySwitchbot to 0.55.4 (#133861) --- homeassistant/components/switchbot/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/switchbot/manifest.json b/homeassistant/components/switchbot/manifest.json index 3153e181af9..1b80da43e16 100644 --- a/homeassistant/components/switchbot/manifest.json +++ b/homeassistant/components/switchbot/manifest.json @@ -39,5 +39,5 @@ "documentation": "https://www.home-assistant.io/integrations/switchbot", "iot_class": "local_push", "loggers": ["switchbot"], - "requirements": ["PySwitchbot==0.55.3"] + "requirements": ["PySwitchbot==0.55.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index 65d9ca63667..661571b2cb9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -84,7 +84,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.55.3 +PySwitchbot==0.55.4 # homeassistant.components.switchmate PySwitchmate==0.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b4d0fbf0432..9ff8ca7c990 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -81,7 +81,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.55.3 +PySwitchbot==0.55.4 # homeassistant.components.syncthru PySyncThru==0.7.10 From 9e1ba004d4a880916c1cd38fa79579c52ed54829 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Mon, 23 Dec 2024 09:17:52 +0100 Subject: [PATCH 665/677] Add translated enum entity for Fronius error code (#133394) --- homeassistant/components/fronius/const.py | 161 +++ homeassistant/components/fronius/sensor.py | 10 + homeassistant/components/fronius/strings.json | 101 ++ .../fronius/snapshots/test_sensor.ambr | 976 
++++++++++++++++++ tests/components/fronius/test_sensor.py | 18 +- 5 files changed, 1257 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/fronius/const.py b/homeassistant/components/fronius/const.py index 273f1acab41..e8b2fa6c2e8 100644 --- a/homeassistant/components/fronius/const.py +++ b/homeassistant/components/fronius/const.py @@ -68,6 +68,167 @@ def get_inverter_status_message(code: StateType) -> InverterStatusCodeOption | N return _INVERTER_STATUS_CODES.get(code) # type: ignore[arg-type] +INVERTER_ERROR_CODES: Final[dict[int, str]] = { + 0: "no_error", + 102: "ac_voltage_too_high", + 103: "ac_voltage_too_low", + 105: "ac_frequency_too_high", + 106: "ac_frequency_too_low", + 107: "ac_grid_outside_permissible_limits", + 108: "stand_alone_operation_detected", + 112: "rcmu_error", + 240: "arc_detection_triggered", + 241: "arc_detection_triggered", + 242: "arc_detection_triggered", + 243: "arc_detection_triggered", + 301: "overcurrent_ac", + 302: "overcurrent_dc", + 303: "dc_module_over_temperature", + 304: "ac_module_over_temperature", + 305: "no_power_fed_in_despite_closed_relay", + 306: "pv_output_too_low_for_feeding_energy_into_the_grid", + 307: "low_pv_voltage_dc_input_voltage_too_low", + 308: "intermediate_circuit_voltage_too_high", + 309: "dc_input_voltage_mppt_1_too_high", + 311: "polarity_of_dc_strings_reversed", + 313: "dc_input_voltage_mppt_2_too_high", + 314: "current_sensor_calibration_timeout", + 315: "ac_current_sensor_error", + 316: "interrupt_check_fail", + 325: "overtemperature_in_connection_area", + 326: "fan_1_error", + 327: "fan_2_error", + 401: "no_communication_with_power_stage_set", + 406: "ac_module_temperature_sensor_faulty_l1", + 407: "ac_module_temperature_sensor_faulty_l2", + 408: "dc_component_measured_in_grid_too_high", + 412: "fixed_voltage_mode_out_of_range", + 415: "safety_cut_out_triggered", + 416: "no_communication_between_power_stage_and_control_system", + 417: "hardware_id_problem", + 419: 
"unique_id_conflict", + 420: "no_communication_with_hybrid_manager", + 421: "hid_range_error", + 425: "no_communication_with_power_stage_set", + 426: "possible_hardware_fault", + 427: "possible_hardware_fault", + 428: "possible_hardware_fault", + 431: "software_problem", + 436: "functional_incompatibility_between_pc_boards", + 437: "power_stage_set_problem", + 438: "functional_incompatibility_between_pc_boards", + 443: "intermediate_circuit_voltage_too_low_or_asymmetric", + 445: "compatibility_error_invalid_power_stage_configuration", + 447: "insulation_fault", + 448: "neutral_conductor_not_connected", + 450: "guard_cannot_be_found", + 451: "memory_error_detected", + 452: "communication", + 502: "insulation_error_on_solar_panels", + 509: "no_energy_fed_into_grid_past_24_hours", + 515: "no_communication_with_filter", + 516: "no_communication_with_storage_unit", + 517: "power_derating_due_to_high_temperature", + 518: "internal_dsp_malfunction", + 519: "no_communication_with_storage_unit", + 520: "no_energy_fed_by_mppt1_past_24_hours", + 522: "dc_low_string_1", + 523: "dc_low_string_2", + 558: "functional_incompatibility_between_pc_boards", + 559: "functional_incompatibility_between_pc_boards", + 560: "derating_caused_by_over_frequency", + 564: "functional_incompatibility_between_pc_boards", + 566: "arc_detector_switched_off", + 567: "grid_voltage_dependent_power_reduction_active", + 601: "can_bus_full", + 603: "ac_module_temperature_sensor_faulty_l3", + 604: "dc_module_temperature_sensor_faulty", + 607: "rcmu_error", + 608: "functional_incompatibility_between_pc_boards", + 701: "internal_processor_status", + 702: "internal_processor_status", + 703: "internal_processor_status", + 704: "internal_processor_status", + 705: "internal_processor_status", + 706: "internal_processor_status", + 707: "internal_processor_status", + 708: "internal_processor_status", + 709: "internal_processor_status", + 710: "internal_processor_status", + 711: "internal_processor_status", + 712: 
"internal_processor_status", + 713: "internal_processor_status", + 714: "internal_processor_status", + 715: "internal_processor_status", + 716: "internal_processor_status", + 721: "eeprom_reinitialised", + 722: "internal_processor_status", + 723: "internal_processor_status", + 724: "internal_processor_status", + 725: "internal_processor_status", + 726: "internal_processor_status", + 727: "internal_processor_status", + 728: "internal_processor_status", + 729: "internal_processor_status", + 730: "internal_processor_status", + 731: "initialisation_error_usb_flash_drive_not_supported", + 732: "initialisation_error_usb_stick_over_current", + 733: "no_usb_flash_drive_connected", + 734: "update_file_not_recognised_or_missing", + 735: "update_file_does_not_match_device", + 736: "write_or_read_error_occurred", + 737: "file_could_not_be_opened", + 738: "log_file_cannot_be_saved", + 740: "initialisation_error_file_system_error_on_usb", + 741: "error_during_logging_data_recording", + 743: "error_during_update_process", + 745: "update_file_corrupt", + 746: "error_during_update_process", + 751: "time_lost", + 752: "real_time_clock_communication_error", + 753: "real_time_clock_in_emergency_mode", + 754: "internal_processor_status", + 755: "internal_processor_status", + 757: "real_time_clock_hardware_error", + 758: "real_time_clock_in_emergency_mode", + 760: "internal_hardware_error", + 761: "internal_processor_status", + 762: "internal_processor_status", + 763: "internal_processor_status", + 764: "internal_processor_status", + 765: "internal_processor_status", + 766: "emergency_power_derating_activated", + 767: "internal_processor_status", + 768: "different_power_limitation_in_hardware_modules", + 772: "storage_unit_not_available", + 773: "software_update_invalid_country_setup", + 775: "pmc_power_stage_set_not_available", + 776: "invalid_device_type", + 781: "internal_processor_status", + 782: "internal_processor_status", + 783: "internal_processor_status", + 784: 
"internal_processor_status", + 785: "internal_processor_status", + 786: "internal_processor_status", + 787: "internal_processor_status", + 788: "internal_processor_status", + 789: "internal_processor_status", + 790: "internal_processor_status", + 791: "internal_processor_status", + 792: "internal_processor_status", + 793: "internal_processor_status", + 794: "internal_processor_status", + 1001: "insulation_measurement_triggered", + 1024: "inverter_settings_changed_restart_required", + 1030: "wired_shut_down_triggered", + 1036: "grid_frequency_exceeded_limit_reconnecting", + 1112: "mains_voltage_dependent_power_reduction", + 1175: "too_little_dc_power_for_feed_in_operation", + 1196: "inverter_required_setup_values_not_received", + 65000: "dc_connection_inverter_battery_interrupted", +} + + class MeterLocationCodeOption(StrEnum): """Meter location codes for Fronius meters.""" diff --git a/homeassistant/components/fronius/sensor.py b/homeassistant/components/fronius/sensor.py index 95c5df269e4..03f666ffafd 100644 --- a/homeassistant/components/fronius/sensor.py +++ b/homeassistant/components/fronius/sensor.py @@ -33,6 +33,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ( DOMAIN, + INVERTER_ERROR_CODES, SOLAR_NET_DISCOVERY_NEW, InverterStatusCodeOption, MeterLocationCodeOption, @@ -205,6 +206,15 @@ INVERTER_ENTITY_DESCRIPTIONS: list[FroniusSensorEntityDescription] = [ FroniusSensorEntityDescription( key="error_code", entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + FroniusSensorEntityDescription( + key="error_message", + response_key="error_code", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.ENUM, + options=list(dict.fromkeys(INVERTER_ERROR_CODES.values())), + value_fn=INVERTER_ERROR_CODES.get, # type: ignore[arg-type] ), FroniusSensorEntityDescription( key="status_code", diff --git a/homeassistant/components/fronius/strings.json 
b/homeassistant/components/fronius/strings.json index e2740c76696..b77f6fec83c 100644 --- a/homeassistant/components/fronius/strings.json +++ b/homeassistant/components/fronius/strings.json @@ -73,6 +73,107 @@ "error_code": { "name": "Error code" }, + "error_message": { + "name": "Error message", + "state": { + "no_error": "No error", + "ac_voltage_too_high": "AC voltage too high", + "ac_voltage_too_low": "AC voltage too low", + "ac_frequency_too_high": "AC frequency too high", + "ac_frequency_too_low": "AC frequency too low", + "ac_grid_outside_permissible_limits": "AC grid outside the permissible limits", + "stand_alone_operation_detected": "Stand alone operation detected", + "rcmu_error": "RCMU error", + "arc_detection_triggered": "Arc detection triggered", + "overcurrent_ac": "Overcurrent (AC)", + "overcurrent_dc": "Overcurrent (DC)", + "dc_module_over_temperature": "DC module over temperature", + "ac_module_over_temperature": "AC module over temperature", + "no_power_fed_in_despite_closed_relay": "No power being fed in, despite closed relay", + "pv_output_too_low_for_feeding_energy_into_the_grid": "PV output too low for feeding energy into the grid", + "low_pv_voltage_dc_input_voltage_too_low": "Low PV voltage - DC input voltage too low for feeding energy into the grid", + "intermediate_circuit_voltage_too_high": "Intermediate circuit voltage too high", + "dc_input_voltage_mppt_1_too_high": "DC input voltage MPPT 1 too high", + "polarity_of_dc_strings_reversed": "Polarity of DC strings reversed", + "dc_input_voltage_mppt_2_too_high": "DC input voltage MPPT 2 too high", + "current_sensor_calibration_timeout": "Current sensor calibration timeout", + "ac_current_sensor_error": "AC current sensor error", + "interrupt_check_fail": "Interrupt Check fail", + "overtemperature_in_connection_area": "Overtemperature in the connection area", + "fan_1_error": "Fan 1 error", + "fan_2_error": "Fan 2 error", + "no_communication_with_power_stage_set": "No communication with 
the power stage set possible", + "ac_module_temperature_sensor_faulty_l1": "AC module temperature sensor faulty (L1)", + "ac_module_temperature_sensor_faulty_l2": "AC module temperature sensor faulty (L2)", + "dc_component_measured_in_grid_too_high": "DC component measured in the grid too high", + "fixed_voltage_mode_out_of_range": "Fixed voltage mode has been selected instead of MPP voltage mode and the fixed voltage has been set to too low or too high a value", + "safety_cut_out_triggered": "Safety cut out via option card or RECERBO has triggered", + "no_communication_between_power_stage_and_control_system": "No communication possible between power stage set and control system", + "hardware_id_problem": "Hardware ID problem", + "unique_id_conflict": "Unique ID conflict", + "no_communication_with_hybrid_manager": "No communication possible with the Hybrid manager", + "hid_range_error": "HID range error", + "possible_hardware_fault": "Possible hardware fault", + "software_problem": "Software problem", + "functional_incompatibility_between_pc_boards": "Functional incompatibility (one or more PC boards in the inverter are not compatible with each other, e.g. after a PC board has been replaced)", + "power_stage_set_problem": "Power stage set problem", + "intermediate_circuit_voltage_too_low_or_asymmetric": "Intermediate circuit voltage too low or asymmetric", + "compatibility_error_invalid_power_stage_configuration": "Compatibility error (e.g. 
due to replacement of a PC board) - invalid power stage set configuration", + "insulation_fault": "Insulation fault", + "neutral_conductor_not_connected": "Neutral conductor not connected", + "guard_cannot_be_found": "Guard cannot be found", + "memory_error_detected": "Memory error detected", + "communication": "Communication error", + "insulation_error_on_solar_panels": "Insulation error on the solar panels", + "no_energy_fed_into_grid_past_24_hours": "No energy fed into the grid in the past 24 hours", + "no_communication_with_filter": "No communication with filter possible", + "no_communication_with_storage_unit": "No communication possible with the storage unit", + "power_derating_due_to_high_temperature": "Power derating caused by too high a temperature", + "internal_dsp_malfunction": "Internal DSP malfunction", + "no_energy_fed_by_mppt1_past_24_hours": "No energy fed into the grid by MPPT1 in the past 24 hours", + "dc_low_string_1": "DC low string 1", + "dc_low_string_2": "DC low string 2", + "derating_caused_by_over_frequency": "Derating caused by over-frequency", + "arc_detector_switched_off": "Arc detector switched off (e.g. during external arc monitoring)", + "grid_voltage_dependent_power_reduction_active": "Grid Voltage Dependent Power Reduction is active", + "can_bus_full": "CAN bus is full", + "ac_module_temperature_sensor_faulty_l3": "AC module temperature sensor faulty (L3)", + "dc_module_temperature_sensor_faulty": "DC module temperature sensor faulty", + "internal_processor_status": "Warning about the internal processor status. 
See status code for more information", + "eeprom_reinitialised": "EEPROM has been re-initialised", + "initialisation_error_usb_flash_drive_not_supported": "Initialisation error – USB flash drive is not supported", + "initialisation_error_usb_stick_over_current": "Initialisation error – Over current on USB stick", + "no_usb_flash_drive_connected": "No USB flash drive connected", + "update_file_not_recognised_or_missing": "Update file not recognised or not present", + "update_file_does_not_match_device": "Update file does not match the device, update file too old", + "write_or_read_error_occurred": "Write or read error occurred", + "file_could_not_be_opened": "File could not be opened", + "log_file_cannot_be_saved": "Log file cannot be saved (e.g. USB flash drive is write protected or full)", + "initialisation_error_file_system_error_on_usb": "Initialisation error in file system on USB flash drive", + "error_during_logging_data_recording": "Error during recording of logging data", + "error_during_update_process": "Error occurred during update process", + "update_file_corrupt": "Update file corrupt", + "time_lost": "Time lost", + "real_time_clock_communication_error": "Real Time Clock module communication error", + "real_time_clock_in_emergency_mode": "Internal error: Real Time Clock module is in emergency mode", + "real_time_clock_hardware_error": "Hardware error in the Real Time Clock module", + "internal_hardware_error": "Internal hardware error", + "emergency_power_derating_activated": "Emergency power derating activated", + "different_power_limitation_in_hardware_modules": "Different power limitation in the hardware modules", + "storage_unit_not_available": "Storage unit not available", + "software_update_invalid_country_setup": "Software update group 0 (invalid country setup)", + "pmc_power_stage_set_not_available": "PMC power stage set not available", + "invalid_device_type": "Invalid device type", + "insulation_measurement_triggered": "Insulation measurement 
triggered", + "inverter_settings_changed_restart_required": "Inverter settings have been changed, inverter restart required", + "wired_shut_down_triggered": "Wired shut down triggered", + "grid_frequency_exceeded_limit_reconnecting": "The grid frequency has exceeded a limit value when reconnecting", + "mains_voltage_dependent_power_reduction": "Mains voltage-dependent power reduction", + "too_little_dc_power_for_feed_in_operation": "Too little DC power for feed-in operation", + "inverter_required_setup_values_not_received": "Inverter required setup values could not be received", + "dc_connection_inverter_battery_interrupted": "DC connection between inverter and battery interrupted" + } + }, "status_code": { "name": "Status code" }, diff --git a/tests/components/fronius/snapshots/test_sensor.ambr b/tests/components/fronius/snapshots/test_sensor.ambr index 8f8c9d919fc..81770893273 100644 --- a/tests/components/fronius/snapshots/test_sensor.ambr +++ b/tests/components/fronius/snapshots/test_sensor.ambr @@ -402,6 +402,250 @@ 'state': '0', }) # --- +# name: test_gen24[sensor.inverter_name_error_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_error', + 'ac_voltage_too_high', + 'ac_voltage_too_low', + 'ac_frequency_too_high', + 'ac_frequency_too_low', + 'ac_grid_outside_permissible_limits', + 'stand_alone_operation_detected', + 'rcmu_error', + 'arc_detection_triggered', + 'overcurrent_ac', + 'overcurrent_dc', + 'dc_module_over_temperature', + 'ac_module_over_temperature', + 'no_power_fed_in_despite_closed_relay', + 'pv_output_too_low_for_feeding_energy_into_the_grid', + 'low_pv_voltage_dc_input_voltage_too_low', + 'intermediate_circuit_voltage_too_high', + 'dc_input_voltage_mppt_1_too_high', + 'polarity_of_dc_strings_reversed', + 'dc_input_voltage_mppt_2_too_high', + 'current_sensor_calibration_timeout', + 'ac_current_sensor_error', + 'interrupt_check_fail', + 
'overtemperature_in_connection_area', + 'fan_1_error', + 'fan_2_error', + 'no_communication_with_power_stage_set', + 'ac_module_temperature_sensor_faulty_l1', + 'ac_module_temperature_sensor_faulty_l2', + 'dc_component_measured_in_grid_too_high', + 'fixed_voltage_mode_out_of_range', + 'safety_cut_out_triggered', + 'no_communication_between_power_stage_and_control_system', + 'hardware_id_problem', + 'unique_id_conflict', + 'no_communication_with_hybrid_manager', + 'hid_range_error', + 'possible_hardware_fault', + 'software_problem', + 'functional_incompatibility_between_pc_boards', + 'power_stage_set_problem', + 'intermediate_circuit_voltage_too_low_or_asymmetric', + 'compatibility_error_invalid_power_stage_configuration', + 'insulation_fault', + 'neutral_conductor_not_connected', + 'guard_cannot_be_found', + 'memory_error_detected', + 'communication', + 'insulation_error_on_solar_panels', + 'no_energy_fed_into_grid_past_24_hours', + 'no_communication_with_filter', + 'no_communication_with_storage_unit', + 'power_derating_due_to_high_temperature', + 'internal_dsp_malfunction', + 'no_energy_fed_by_mppt1_past_24_hours', + 'dc_low_string_1', + 'dc_low_string_2', + 'derating_caused_by_over_frequency', + 'arc_detector_switched_off', + 'grid_voltage_dependent_power_reduction_active', + 'can_bus_full', + 'ac_module_temperature_sensor_faulty_l3', + 'dc_module_temperature_sensor_faulty', + 'internal_processor_status', + 'eeprom_reinitialised', + 'initialisation_error_usb_flash_drive_not_supported', + 'initialisation_error_usb_stick_over_current', + 'no_usb_flash_drive_connected', + 'update_file_not_recognised_or_missing', + 'update_file_does_not_match_device', + 'write_or_read_error_occurred', + 'file_could_not_be_opened', + 'log_file_cannot_be_saved', + 'initialisation_error_file_system_error_on_usb', + 'error_during_logging_data_recording', + 'error_during_update_process', + 'update_file_corrupt', + 'time_lost', + 'real_time_clock_communication_error', + 
'real_time_clock_in_emergency_mode', + 'real_time_clock_hardware_error', + 'internal_hardware_error', + 'emergency_power_derating_activated', + 'different_power_limitation_in_hardware_modules', + 'storage_unit_not_available', + 'software_update_invalid_country_setup', + 'pmc_power_stage_set_not_available', + 'invalid_device_type', + 'insulation_measurement_triggered', + 'inverter_settings_changed_restart_required', + 'wired_shut_down_triggered', + 'grid_frequency_exceeded_limit_reconnecting', + 'mains_voltage_dependent_power_reduction', + 'too_little_dc_power_for_feed_in_operation', + 'inverter_required_setup_values_not_received', + 'dc_connection_inverter_battery_interrupted', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.inverter_name_error_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Error message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_message', + 'unique_id': '12345678-error_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24[sensor.inverter_name_error_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Inverter name Error message', + 'options': list([ + 'no_error', + 'ac_voltage_too_high', + 'ac_voltage_too_low', + 'ac_frequency_too_high', + 'ac_frequency_too_low', + 'ac_grid_outside_permissible_limits', + 'stand_alone_operation_detected', + 'rcmu_error', + 'arc_detection_triggered', + 'overcurrent_ac', + 'overcurrent_dc', + 'dc_module_over_temperature', + 'ac_module_over_temperature', + 'no_power_fed_in_despite_closed_relay', + 'pv_output_too_low_for_feeding_energy_into_the_grid', + 
'low_pv_voltage_dc_input_voltage_too_low', + 'intermediate_circuit_voltage_too_high', + 'dc_input_voltage_mppt_1_too_high', + 'polarity_of_dc_strings_reversed', + 'dc_input_voltage_mppt_2_too_high', + 'current_sensor_calibration_timeout', + 'ac_current_sensor_error', + 'interrupt_check_fail', + 'overtemperature_in_connection_area', + 'fan_1_error', + 'fan_2_error', + 'no_communication_with_power_stage_set', + 'ac_module_temperature_sensor_faulty_l1', + 'ac_module_temperature_sensor_faulty_l2', + 'dc_component_measured_in_grid_too_high', + 'fixed_voltage_mode_out_of_range', + 'safety_cut_out_triggered', + 'no_communication_between_power_stage_and_control_system', + 'hardware_id_problem', + 'unique_id_conflict', + 'no_communication_with_hybrid_manager', + 'hid_range_error', + 'possible_hardware_fault', + 'software_problem', + 'functional_incompatibility_between_pc_boards', + 'power_stage_set_problem', + 'intermediate_circuit_voltage_too_low_or_asymmetric', + 'compatibility_error_invalid_power_stage_configuration', + 'insulation_fault', + 'neutral_conductor_not_connected', + 'guard_cannot_be_found', + 'memory_error_detected', + 'communication', + 'insulation_error_on_solar_panels', + 'no_energy_fed_into_grid_past_24_hours', + 'no_communication_with_filter', + 'no_communication_with_storage_unit', + 'power_derating_due_to_high_temperature', + 'internal_dsp_malfunction', + 'no_energy_fed_by_mppt1_past_24_hours', + 'dc_low_string_1', + 'dc_low_string_2', + 'derating_caused_by_over_frequency', + 'arc_detector_switched_off', + 'grid_voltage_dependent_power_reduction_active', + 'can_bus_full', + 'ac_module_temperature_sensor_faulty_l3', + 'dc_module_temperature_sensor_faulty', + 'internal_processor_status', + 'eeprom_reinitialised', + 'initialisation_error_usb_flash_drive_not_supported', + 'initialisation_error_usb_stick_over_current', + 'no_usb_flash_drive_connected', + 'update_file_not_recognised_or_missing', + 'update_file_does_not_match_device', + 
'write_or_read_error_occurred', + 'file_could_not_be_opened', + 'log_file_cannot_be_saved', + 'initialisation_error_file_system_error_on_usb', + 'error_during_logging_data_recording', + 'error_during_update_process', + 'update_file_corrupt', + 'time_lost', + 'real_time_clock_communication_error', + 'real_time_clock_in_emergency_mode', + 'real_time_clock_hardware_error', + 'internal_hardware_error', + 'emergency_power_derating_activated', + 'different_power_limitation_in_hardware_modules', + 'storage_unit_not_available', + 'software_update_invalid_country_setup', + 'pmc_power_stage_set_not_available', + 'invalid_device_type', + 'insulation_measurement_triggered', + 'inverter_settings_changed_restart_required', + 'wired_shut_down_triggered', + 'grid_frequency_exceeded_limit_reconnecting', + 'mains_voltage_dependent_power_reduction', + 'too_little_dc_power_for_feed_in_operation', + 'inverter_required_setup_values_not_received', + 'dc_connection_inverter_battery_interrupted', + ]), + }), + 'context': , + 'entity_id': 'sensor.inverter_name_error_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'no_error', + }) +# --- # name: test_gen24[sensor.inverter_name_frequency-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -3653,6 +3897,250 @@ 'state': '0', }) # --- +# name: test_gen24_storage[sensor.gen24_storage_error_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_error', + 'ac_voltage_too_high', + 'ac_voltage_too_low', + 'ac_frequency_too_high', + 'ac_frequency_too_low', + 'ac_grid_outside_permissible_limits', + 'stand_alone_operation_detected', + 'rcmu_error', + 'arc_detection_triggered', + 'overcurrent_ac', + 'overcurrent_dc', + 'dc_module_over_temperature', + 'ac_module_over_temperature', + 'no_power_fed_in_despite_closed_relay', + 'pv_output_too_low_for_feeding_energy_into_the_grid', + 'low_pv_voltage_dc_input_voltage_too_low', + 
'intermediate_circuit_voltage_too_high', + 'dc_input_voltage_mppt_1_too_high', + 'polarity_of_dc_strings_reversed', + 'dc_input_voltage_mppt_2_too_high', + 'current_sensor_calibration_timeout', + 'ac_current_sensor_error', + 'interrupt_check_fail', + 'overtemperature_in_connection_area', + 'fan_1_error', + 'fan_2_error', + 'no_communication_with_power_stage_set', + 'ac_module_temperature_sensor_faulty_l1', + 'ac_module_temperature_sensor_faulty_l2', + 'dc_component_measured_in_grid_too_high', + 'fixed_voltage_mode_out_of_range', + 'safety_cut_out_triggered', + 'no_communication_between_power_stage_and_control_system', + 'hardware_id_problem', + 'unique_id_conflict', + 'no_communication_with_hybrid_manager', + 'hid_range_error', + 'possible_hardware_fault', + 'software_problem', + 'functional_incompatibility_between_pc_boards', + 'power_stage_set_problem', + 'intermediate_circuit_voltage_too_low_or_asymmetric', + 'compatibility_error_invalid_power_stage_configuration', + 'insulation_fault', + 'neutral_conductor_not_connected', + 'guard_cannot_be_found', + 'memory_error_detected', + 'communication', + 'insulation_error_on_solar_panels', + 'no_energy_fed_into_grid_past_24_hours', + 'no_communication_with_filter', + 'no_communication_with_storage_unit', + 'power_derating_due_to_high_temperature', + 'internal_dsp_malfunction', + 'no_energy_fed_by_mppt1_past_24_hours', + 'dc_low_string_1', + 'dc_low_string_2', + 'derating_caused_by_over_frequency', + 'arc_detector_switched_off', + 'grid_voltage_dependent_power_reduction_active', + 'can_bus_full', + 'ac_module_temperature_sensor_faulty_l3', + 'dc_module_temperature_sensor_faulty', + 'internal_processor_status', + 'eeprom_reinitialised', + 'initialisation_error_usb_flash_drive_not_supported', + 'initialisation_error_usb_stick_over_current', + 'no_usb_flash_drive_connected', + 'update_file_not_recognised_or_missing', + 'update_file_does_not_match_device', + 'write_or_read_error_occurred', + 'file_could_not_be_opened', + 
'log_file_cannot_be_saved', + 'initialisation_error_file_system_error_on_usb', + 'error_during_logging_data_recording', + 'error_during_update_process', + 'update_file_corrupt', + 'time_lost', + 'real_time_clock_communication_error', + 'real_time_clock_in_emergency_mode', + 'real_time_clock_hardware_error', + 'internal_hardware_error', + 'emergency_power_derating_activated', + 'different_power_limitation_in_hardware_modules', + 'storage_unit_not_available', + 'software_update_invalid_country_setup', + 'pmc_power_stage_set_not_available', + 'invalid_device_type', + 'insulation_measurement_triggered', + 'inverter_settings_changed_restart_required', + 'wired_shut_down_triggered', + 'grid_frequency_exceeded_limit_reconnecting', + 'mains_voltage_dependent_power_reduction', + 'too_little_dc_power_for_feed_in_operation', + 'inverter_required_setup_values_not_received', + 'dc_connection_inverter_battery_interrupted', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.gen24_storage_error_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Error message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_message', + 'unique_id': '12345678-error_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_gen24_storage[sensor.gen24_storage_error_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gen24 Storage Error message', + 'options': list([ + 'no_error', + 'ac_voltage_too_high', + 'ac_voltage_too_low', + 'ac_frequency_too_high', + 'ac_frequency_too_low', + 'ac_grid_outside_permissible_limits', + 'stand_alone_operation_detected', + 'rcmu_error', + 
'arc_detection_triggered', + 'overcurrent_ac', + 'overcurrent_dc', + 'dc_module_over_temperature', + 'ac_module_over_temperature', + 'no_power_fed_in_despite_closed_relay', + 'pv_output_too_low_for_feeding_energy_into_the_grid', + 'low_pv_voltage_dc_input_voltage_too_low', + 'intermediate_circuit_voltage_too_high', + 'dc_input_voltage_mppt_1_too_high', + 'polarity_of_dc_strings_reversed', + 'dc_input_voltage_mppt_2_too_high', + 'current_sensor_calibration_timeout', + 'ac_current_sensor_error', + 'interrupt_check_fail', + 'overtemperature_in_connection_area', + 'fan_1_error', + 'fan_2_error', + 'no_communication_with_power_stage_set', + 'ac_module_temperature_sensor_faulty_l1', + 'ac_module_temperature_sensor_faulty_l2', + 'dc_component_measured_in_grid_too_high', + 'fixed_voltage_mode_out_of_range', + 'safety_cut_out_triggered', + 'no_communication_between_power_stage_and_control_system', + 'hardware_id_problem', + 'unique_id_conflict', + 'no_communication_with_hybrid_manager', + 'hid_range_error', + 'possible_hardware_fault', + 'software_problem', + 'functional_incompatibility_between_pc_boards', + 'power_stage_set_problem', + 'intermediate_circuit_voltage_too_low_or_asymmetric', + 'compatibility_error_invalid_power_stage_configuration', + 'insulation_fault', + 'neutral_conductor_not_connected', + 'guard_cannot_be_found', + 'memory_error_detected', + 'communication', + 'insulation_error_on_solar_panels', + 'no_energy_fed_into_grid_past_24_hours', + 'no_communication_with_filter', + 'no_communication_with_storage_unit', + 'power_derating_due_to_high_temperature', + 'internal_dsp_malfunction', + 'no_energy_fed_by_mppt1_past_24_hours', + 'dc_low_string_1', + 'dc_low_string_2', + 'derating_caused_by_over_frequency', + 'arc_detector_switched_off', + 'grid_voltage_dependent_power_reduction_active', + 'can_bus_full', + 'ac_module_temperature_sensor_faulty_l3', + 'dc_module_temperature_sensor_faulty', + 'internal_processor_status', + 'eeprom_reinitialised', + 
'initialisation_error_usb_flash_drive_not_supported', + 'initialisation_error_usb_stick_over_current', + 'no_usb_flash_drive_connected', + 'update_file_not_recognised_or_missing', + 'update_file_does_not_match_device', + 'write_or_read_error_occurred', + 'file_could_not_be_opened', + 'log_file_cannot_be_saved', + 'initialisation_error_file_system_error_on_usb', + 'error_during_logging_data_recording', + 'error_during_update_process', + 'update_file_corrupt', + 'time_lost', + 'real_time_clock_communication_error', + 'real_time_clock_in_emergency_mode', + 'real_time_clock_hardware_error', + 'internal_hardware_error', + 'emergency_power_derating_activated', + 'different_power_limitation_in_hardware_modules', + 'storage_unit_not_available', + 'software_update_invalid_country_setup', + 'pmc_power_stage_set_not_available', + 'invalid_device_type', + 'insulation_measurement_triggered', + 'inverter_settings_changed_restart_required', + 'wired_shut_down_triggered', + 'grid_frequency_exceeded_limit_reconnecting', + 'mains_voltage_dependent_power_reduction', + 'too_little_dc_power_for_feed_in_operation', + 'inverter_required_setup_values_not_received', + 'dc_connection_inverter_battery_interrupted', + ]), + }), + 'context': , + 'entity_id': 'sensor.gen24_storage_error_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'no_error', + }) +# --- # name: test_gen24_storage[sensor.gen24_storage_frequency-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -7022,6 +7510,250 @@ 'state': '0', }) # --- +# name: test_primo_s0[sensor.primo_3_0_1_error_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_error', + 'ac_voltage_too_high', + 'ac_voltage_too_low', + 'ac_frequency_too_high', + 'ac_frequency_too_low', + 'ac_grid_outside_permissible_limits', + 'stand_alone_operation_detected', + 'rcmu_error', + 'arc_detection_triggered', + 'overcurrent_ac', + 
'overcurrent_dc', + 'dc_module_over_temperature', + 'ac_module_over_temperature', + 'no_power_fed_in_despite_closed_relay', + 'pv_output_too_low_for_feeding_energy_into_the_grid', + 'low_pv_voltage_dc_input_voltage_too_low', + 'intermediate_circuit_voltage_too_high', + 'dc_input_voltage_mppt_1_too_high', + 'polarity_of_dc_strings_reversed', + 'dc_input_voltage_mppt_2_too_high', + 'current_sensor_calibration_timeout', + 'ac_current_sensor_error', + 'interrupt_check_fail', + 'overtemperature_in_connection_area', + 'fan_1_error', + 'fan_2_error', + 'no_communication_with_power_stage_set', + 'ac_module_temperature_sensor_faulty_l1', + 'ac_module_temperature_sensor_faulty_l2', + 'dc_component_measured_in_grid_too_high', + 'fixed_voltage_mode_out_of_range', + 'safety_cut_out_triggered', + 'no_communication_between_power_stage_and_control_system', + 'hardware_id_problem', + 'unique_id_conflict', + 'no_communication_with_hybrid_manager', + 'hid_range_error', + 'possible_hardware_fault', + 'software_problem', + 'functional_incompatibility_between_pc_boards', + 'power_stage_set_problem', + 'intermediate_circuit_voltage_too_low_or_asymmetric', + 'compatibility_error_invalid_power_stage_configuration', + 'insulation_fault', + 'neutral_conductor_not_connected', + 'guard_cannot_be_found', + 'memory_error_detected', + 'communication', + 'insulation_error_on_solar_panels', + 'no_energy_fed_into_grid_past_24_hours', + 'no_communication_with_filter', + 'no_communication_with_storage_unit', + 'power_derating_due_to_high_temperature', + 'internal_dsp_malfunction', + 'no_energy_fed_by_mppt1_past_24_hours', + 'dc_low_string_1', + 'dc_low_string_2', + 'derating_caused_by_over_frequency', + 'arc_detector_switched_off', + 'grid_voltage_dependent_power_reduction_active', + 'can_bus_full', + 'ac_module_temperature_sensor_faulty_l3', + 'dc_module_temperature_sensor_faulty', + 'internal_processor_status', + 'eeprom_reinitialised', + 'initialisation_error_usb_flash_drive_not_supported', + 
'initialisation_error_usb_stick_over_current', + 'no_usb_flash_drive_connected', + 'update_file_not_recognised_or_missing', + 'update_file_does_not_match_device', + 'write_or_read_error_occurred', + 'file_could_not_be_opened', + 'log_file_cannot_be_saved', + 'initialisation_error_file_system_error_on_usb', + 'error_during_logging_data_recording', + 'error_during_update_process', + 'update_file_corrupt', + 'time_lost', + 'real_time_clock_communication_error', + 'real_time_clock_in_emergency_mode', + 'real_time_clock_hardware_error', + 'internal_hardware_error', + 'emergency_power_derating_activated', + 'different_power_limitation_in_hardware_modules', + 'storage_unit_not_available', + 'software_update_invalid_country_setup', + 'pmc_power_stage_set_not_available', + 'invalid_device_type', + 'insulation_measurement_triggered', + 'inverter_settings_changed_restart_required', + 'wired_shut_down_triggered', + 'grid_frequency_exceeded_limit_reconnecting', + 'mains_voltage_dependent_power_reduction', + 'too_little_dc_power_for_feed_in_operation', + 'inverter_required_setup_values_not_received', + 'dc_connection_inverter_battery_interrupted', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_3_0_1_error_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Error message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_message', + 'unique_id': '234567-error_message', + 'unit_of_measurement': None, + }) +# --- +# name: test_primo_s0[sensor.primo_3_0_1_error_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Primo 3.0-1 Error message', + 'options': list([ + 'no_error', 
+ 'ac_voltage_too_high', + 'ac_voltage_too_low', + 'ac_frequency_too_high', + 'ac_frequency_too_low', + 'ac_grid_outside_permissible_limits', + 'stand_alone_operation_detected', + 'rcmu_error', + 'arc_detection_triggered', + 'overcurrent_ac', + 'overcurrent_dc', + 'dc_module_over_temperature', + 'ac_module_over_temperature', + 'no_power_fed_in_despite_closed_relay', + 'pv_output_too_low_for_feeding_energy_into_the_grid', + 'low_pv_voltage_dc_input_voltage_too_low', + 'intermediate_circuit_voltage_too_high', + 'dc_input_voltage_mppt_1_too_high', + 'polarity_of_dc_strings_reversed', + 'dc_input_voltage_mppt_2_too_high', + 'current_sensor_calibration_timeout', + 'ac_current_sensor_error', + 'interrupt_check_fail', + 'overtemperature_in_connection_area', + 'fan_1_error', + 'fan_2_error', + 'no_communication_with_power_stage_set', + 'ac_module_temperature_sensor_faulty_l1', + 'ac_module_temperature_sensor_faulty_l2', + 'dc_component_measured_in_grid_too_high', + 'fixed_voltage_mode_out_of_range', + 'safety_cut_out_triggered', + 'no_communication_between_power_stage_and_control_system', + 'hardware_id_problem', + 'unique_id_conflict', + 'no_communication_with_hybrid_manager', + 'hid_range_error', + 'possible_hardware_fault', + 'software_problem', + 'functional_incompatibility_between_pc_boards', + 'power_stage_set_problem', + 'intermediate_circuit_voltage_too_low_or_asymmetric', + 'compatibility_error_invalid_power_stage_configuration', + 'insulation_fault', + 'neutral_conductor_not_connected', + 'guard_cannot_be_found', + 'memory_error_detected', + 'communication', + 'insulation_error_on_solar_panels', + 'no_energy_fed_into_grid_past_24_hours', + 'no_communication_with_filter', + 'no_communication_with_storage_unit', + 'power_derating_due_to_high_temperature', + 'internal_dsp_malfunction', + 'no_energy_fed_by_mppt1_past_24_hours', + 'dc_low_string_1', + 'dc_low_string_2', + 'derating_caused_by_over_frequency', + 'arc_detector_switched_off', + 
'grid_voltage_dependent_power_reduction_active', + 'can_bus_full', + 'ac_module_temperature_sensor_faulty_l3', + 'dc_module_temperature_sensor_faulty', + 'internal_processor_status', + 'eeprom_reinitialised', + 'initialisation_error_usb_flash_drive_not_supported', + 'initialisation_error_usb_stick_over_current', + 'no_usb_flash_drive_connected', + 'update_file_not_recognised_or_missing', + 'update_file_does_not_match_device', + 'write_or_read_error_occurred', + 'file_could_not_be_opened', + 'log_file_cannot_be_saved', + 'initialisation_error_file_system_error_on_usb', + 'error_during_logging_data_recording', + 'error_during_update_process', + 'update_file_corrupt', + 'time_lost', + 'real_time_clock_communication_error', + 'real_time_clock_in_emergency_mode', + 'real_time_clock_hardware_error', + 'internal_hardware_error', + 'emergency_power_derating_activated', + 'different_power_limitation_in_hardware_modules', + 'storage_unit_not_available', + 'software_update_invalid_country_setup', + 'pmc_power_stage_set_not_available', + 'invalid_device_type', + 'insulation_measurement_triggered', + 'inverter_settings_changed_restart_required', + 'wired_shut_down_triggered', + 'grid_frequency_exceeded_limit_reconnecting', + 'mains_voltage_dependent_power_reduction', + 'too_little_dc_power_for_feed_in_operation', + 'inverter_required_setup_values_not_received', + 'dc_connection_inverter_battery_interrupted', + ]), + }), + 'context': , + 'entity_id': 'sensor.primo_3_0_1_error_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'no_error', + }) +# --- # name: test_primo_s0[sensor.primo_3_0_1_frequency-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -7733,6 +8465,250 @@ 'state': '0', }) # --- +# name: test_primo_s0[sensor.primo_5_0_1_error_message-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_error', + 'ac_voltage_too_high', + 'ac_voltage_too_low', + 
'ac_frequency_too_high', + 'ac_frequency_too_low', + 'ac_grid_outside_permissible_limits', + 'stand_alone_operation_detected', + 'rcmu_error', + 'arc_detection_triggered', + 'overcurrent_ac', + 'overcurrent_dc', + 'dc_module_over_temperature', + 'ac_module_over_temperature', + 'no_power_fed_in_despite_closed_relay', + 'pv_output_too_low_for_feeding_energy_into_the_grid', + 'low_pv_voltage_dc_input_voltage_too_low', + 'intermediate_circuit_voltage_too_high', + 'dc_input_voltage_mppt_1_too_high', + 'polarity_of_dc_strings_reversed', + 'dc_input_voltage_mppt_2_too_high', + 'current_sensor_calibration_timeout', + 'ac_current_sensor_error', + 'interrupt_check_fail', + 'overtemperature_in_connection_area', + 'fan_1_error', + 'fan_2_error', + 'no_communication_with_power_stage_set', + 'ac_module_temperature_sensor_faulty_l1', + 'ac_module_temperature_sensor_faulty_l2', + 'dc_component_measured_in_grid_too_high', + 'fixed_voltage_mode_out_of_range', + 'safety_cut_out_triggered', + 'no_communication_between_power_stage_and_control_system', + 'hardware_id_problem', + 'unique_id_conflict', + 'no_communication_with_hybrid_manager', + 'hid_range_error', + 'possible_hardware_fault', + 'software_problem', + 'functional_incompatibility_between_pc_boards', + 'power_stage_set_problem', + 'intermediate_circuit_voltage_too_low_or_asymmetric', + 'compatibility_error_invalid_power_stage_configuration', + 'insulation_fault', + 'neutral_conductor_not_connected', + 'guard_cannot_be_found', + 'memory_error_detected', + 'communication', + 'insulation_error_on_solar_panels', + 'no_energy_fed_into_grid_past_24_hours', + 'no_communication_with_filter', + 'no_communication_with_storage_unit', + 'power_derating_due_to_high_temperature', + 'internal_dsp_malfunction', + 'no_energy_fed_by_mppt1_past_24_hours', + 'dc_low_string_1', + 'dc_low_string_2', + 'derating_caused_by_over_frequency', + 'arc_detector_switched_off', + 'grid_voltage_dependent_power_reduction_active', + 'can_bus_full', + 
'ac_module_temperature_sensor_faulty_l3', + 'dc_module_temperature_sensor_faulty', + 'internal_processor_status', + 'eeprom_reinitialised', + 'initialisation_error_usb_flash_drive_not_supported', + 'initialisation_error_usb_stick_over_current', + 'no_usb_flash_drive_connected', + 'update_file_not_recognised_or_missing', + 'update_file_does_not_match_device', + 'write_or_read_error_occurred', + 'file_could_not_be_opened', + 'log_file_cannot_be_saved', + 'initialisation_error_file_system_error_on_usb', + 'error_during_logging_data_recording', + 'error_during_update_process', + 'update_file_corrupt', + 'time_lost', + 'real_time_clock_communication_error', + 'real_time_clock_in_emergency_mode', + 'real_time_clock_hardware_error', + 'internal_hardware_error', + 'emergency_power_derating_activated', + 'different_power_limitation_in_hardware_modules', + 'storage_unit_not_available', + 'software_update_invalid_country_setup', + 'pmc_power_stage_set_not_available', + 'invalid_device_type', + 'insulation_measurement_triggered', + 'inverter_settings_changed_restart_required', + 'wired_shut_down_triggered', + 'grid_frequency_exceeded_limit_reconnecting', + 'mains_voltage_dependent_power_reduction', + 'too_little_dc_power_for_feed_in_operation', + 'inverter_required_setup_values_not_received', + 'dc_connection_inverter_battery_interrupted', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.primo_5_0_1_error_message', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Error message', + 'platform': 'fronius', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'error_message', + 'unique_id': '123456-error_message', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_primo_s0[sensor.primo_5_0_1_error_message-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Primo 5.0-1 Error message', + 'options': list([ + 'no_error', + 'ac_voltage_too_high', + 'ac_voltage_too_low', + 'ac_frequency_too_high', + 'ac_frequency_too_low', + 'ac_grid_outside_permissible_limits', + 'stand_alone_operation_detected', + 'rcmu_error', + 'arc_detection_triggered', + 'overcurrent_ac', + 'overcurrent_dc', + 'dc_module_over_temperature', + 'ac_module_over_temperature', + 'no_power_fed_in_despite_closed_relay', + 'pv_output_too_low_for_feeding_energy_into_the_grid', + 'low_pv_voltage_dc_input_voltage_too_low', + 'intermediate_circuit_voltage_too_high', + 'dc_input_voltage_mppt_1_too_high', + 'polarity_of_dc_strings_reversed', + 'dc_input_voltage_mppt_2_too_high', + 'current_sensor_calibration_timeout', + 'ac_current_sensor_error', + 'interrupt_check_fail', + 'overtemperature_in_connection_area', + 'fan_1_error', + 'fan_2_error', + 'no_communication_with_power_stage_set', + 'ac_module_temperature_sensor_faulty_l1', + 'ac_module_temperature_sensor_faulty_l2', + 'dc_component_measured_in_grid_too_high', + 'fixed_voltage_mode_out_of_range', + 'safety_cut_out_triggered', + 'no_communication_between_power_stage_and_control_system', + 'hardware_id_problem', + 'unique_id_conflict', + 'no_communication_with_hybrid_manager', + 'hid_range_error', + 'possible_hardware_fault', + 'software_problem', + 'functional_incompatibility_between_pc_boards', + 'power_stage_set_problem', + 'intermediate_circuit_voltage_too_low_or_asymmetric', + 'compatibility_error_invalid_power_stage_configuration', + 'insulation_fault', + 'neutral_conductor_not_connected', + 'guard_cannot_be_found', + 'memory_error_detected', + 'communication', + 'insulation_error_on_solar_panels', + 'no_energy_fed_into_grid_past_24_hours', + 'no_communication_with_filter', + 'no_communication_with_storage_unit', + 'power_derating_due_to_high_temperature', + 
'internal_dsp_malfunction', + 'no_energy_fed_by_mppt1_past_24_hours', + 'dc_low_string_1', + 'dc_low_string_2', + 'derating_caused_by_over_frequency', + 'arc_detector_switched_off', + 'grid_voltage_dependent_power_reduction_active', + 'can_bus_full', + 'ac_module_temperature_sensor_faulty_l3', + 'dc_module_temperature_sensor_faulty', + 'internal_processor_status', + 'eeprom_reinitialised', + 'initialisation_error_usb_flash_drive_not_supported', + 'initialisation_error_usb_stick_over_current', + 'no_usb_flash_drive_connected', + 'update_file_not_recognised_or_missing', + 'update_file_does_not_match_device', + 'write_or_read_error_occurred', + 'file_could_not_be_opened', + 'log_file_cannot_be_saved', + 'initialisation_error_file_system_error_on_usb', + 'error_during_logging_data_recording', + 'error_during_update_process', + 'update_file_corrupt', + 'time_lost', + 'real_time_clock_communication_error', + 'real_time_clock_in_emergency_mode', + 'real_time_clock_hardware_error', + 'internal_hardware_error', + 'emergency_power_derating_activated', + 'different_power_limitation_in_hardware_modules', + 'storage_unit_not_available', + 'software_update_invalid_country_setup', + 'pmc_power_stage_set_not_available', + 'invalid_device_type', + 'insulation_measurement_triggered', + 'inverter_settings_changed_restart_required', + 'wired_shut_down_triggered', + 'grid_frequency_exceeded_limit_reconnecting', + 'mains_voltage_dependent_power_reduction', + 'too_little_dc_power_for_feed_in_operation', + 'inverter_required_setup_values_not_received', + 'dc_connection_inverter_battery_interrupted', + ]), + }), + 'context': , + 'entity_id': 'sensor.primo_5_0_1_error_message', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'no_error', + }) +# --- # name: test_primo_s0[sensor.primo_5_0_1_frequency-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/fronius/test_sensor.py b/tests/components/fronius/test_sensor.py index 
b5d051d56ca..63f36705c8f 100644 --- a/tests/components/fronius/test_sensor.py +++ b/tests/components/fronius/test_sensor.py @@ -36,7 +36,7 @@ async def test_symo_inverter( mock_responses(aioclient_mock, night=True) await setup_fronius_integration(hass) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 58 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 59 assert_state("sensor.symo_20_dc_current", 0) assert_state("sensor.symo_20_energy_day", 10828) assert_state("sensor.symo_20_total_energy", 44186900) @@ -49,7 +49,7 @@ async def test_symo_inverter( freezer.tick(FroniusInverterUpdateCoordinator.default_interval) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 64 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 65 # 4 additional AC entities assert_state("sensor.symo_20_dc_current", 2.19) assert_state("sensor.symo_20_energy_day", 1113) @@ -108,7 +108,7 @@ async def test_symo_meter( mock_responses(aioclient_mock) await setup_fronius_integration(hass) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 64 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 65 # states are rounded to 4 decimals assert_state("sensor.smart_meter_63a_current_phase_1", 7.755) assert_state("sensor.smart_meter_63a_current_phase_2", 6.68) @@ -205,7 +205,7 @@ async def test_symo_power_flow( mock_responses(aioclient_mock, night=True) await setup_fronius_integration(hass) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 58 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 59 # states are rounded to 4 decimals assert_state("sensor.solarnet_energy_day", 10828) assert_state("sensor.solarnet_total_energy", 44186900) @@ -223,7 +223,7 @@ async def test_symo_power_flow( async_fire_time_changed(hass) await hass.async_block_till_done() # 54 because power_flow `rel_SelfConsumption` and `P_PV` is not 
`null` anymore - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 60 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 61 assert_state("sensor.solarnet_energy_day", 1101.7001) assert_state("sensor.solarnet_total_energy", 44188000) assert_state("sensor.solarnet_energy_year", 25508788) @@ -242,7 +242,7 @@ async def test_symo_power_flow( freezer.tick(FroniusPowerFlowUpdateCoordinator.default_interval) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 60 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 61 assert_state("sensor.solarnet_energy_day", 10828) assert_state("sensor.solarnet_total_energy", 44186900) assert_state("sensor.solarnet_energy_year", 25507686) @@ -271,7 +271,7 @@ async def test_gen24( mock_responses(aioclient_mock, fixture_set="gen24") config_entry = await setup_fronius_integration(hass, is_logger=False) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 58 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 59 await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) assert_state("sensor.inverter_name_total_energy", 1530193.42) @@ -313,7 +313,7 @@ async def test_gen24_storage( hass, is_logger=False, unique_id="12345678" ) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 72 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 73 await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Devices @@ -367,7 +367,7 @@ async def test_primo_s0( mock_responses(aioclient_mock, fixture_set="primo_s0", inverter_ids=[1, 2]) config_entry = await setup_fronius_integration(hass, is_logger=True) - assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 47 + assert len(hass.states.async_all(domain_filter=SENSOR_DOMAIN)) == 49 await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # 
Devices From 8991cd4f4622fcc10c80d98316d4c2121de14094 Mon Sep 17 00:00:00 2001 From: mrtlhfr <10065880+mrtlhfr@users.noreply.github.com> Date: Mon, 23 Dec 2024 03:23:04 -0500 Subject: [PATCH 666/677] Adding initial support for Tuya Electric Fireplaces (#133503) --- homeassistant/components/tuya/climate.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/homeassistant/components/tuya/climate.py b/homeassistant/components/tuya/climate.py index 62aa29494e9..1780256a740 100644 --- a/homeassistant/components/tuya/climate.py +++ b/homeassistant/components/tuya/climate.py @@ -77,6 +77,9 @@ CLIMATE_DESCRIPTIONS: dict[str, TuyaClimateEntityDescription] = { key="wkf", switch_only_hvac_mode=HVACMode.HEAT, ), + # Electric Fireplace + # https://developer.tuya.com/en/docs/iot/f?id=Kacpeobojffop + "dbl": TuyaClimateEntityDescription(key="dbl", switch_only_hvac_mode=HVACMode.HEAT), } From b1fe247eed570f9d32ade7eab35a2c6dcb87341d Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Mon, 23 Dec 2024 09:23:13 +0100 Subject: [PATCH 667/677] Upgrade QS from silver to gold for slide_local (#133863) Upgrade QS to gold --- homeassistant/components/slide_local/manifest.json | 2 +- homeassistant/components/slide_local/quality_scale.yaml | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/slide_local/manifest.json b/homeassistant/components/slide_local/manifest.json index 69d5c93b0af..7e524c54a25 100644 --- a/homeassistant/components/slide_local/manifest.json +++ b/homeassistant/components/slide_local/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/slide_local", "integration_type": "device", "iot_class": "local_polling", - "quality_scale": "silver", + "quality_scale": "gold", "requirements": ["goslide-api==0.7.0"], "zeroconf": [ { diff --git a/homeassistant/components/slide_local/quality_scale.yaml b/homeassistant/components/slide_local/quality_scale.yaml index 
54dfd87d98c..0bb30ee8269 100644 --- a/homeassistant/components/slide_local/quality_scale.yaml +++ b/homeassistant/components/slide_local/quality_scale.yaml @@ -56,7 +56,9 @@ rules: comment: | Slide_local represents a single physical device, no dynamic changes of devices possible (besides removal of instance itself). discovery-update-info: done - repair-issues: todo + repair-issues: + status: exempt + comment: No issues/repairs. docs-use-cases: done docs-supported-devices: done docs-supported-functions: done From a6f631729962ef9d7a4bb0f82f30ac399d9ccfae Mon Sep 17 00:00:00 2001 From: Matrix Date: Mon, 23 Dec 2024 16:24:02 +0800 Subject: [PATCH 668/677] Add Leak detect entity for YoLink water meter controller (#131682) --- homeassistant/components/yolink/binary_sensor.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/homeassistant/components/yolink/binary_sensor.py b/homeassistant/components/yolink/binary_sensor.py index 07a1fb07cc0..fa4c2202b03 100644 --- a/homeassistant/components/yolink/binary_sensor.py +++ b/homeassistant/components/yolink/binary_sensor.py @@ -12,6 +12,7 @@ from yolink.const import ( ATTR_DEVICE_LEAK_SENSOR, ATTR_DEVICE_MOTION_SENSOR, ATTR_DEVICE_VIBRATION_SENSOR, + ATTR_DEVICE_WATER_METER_CONTROLLER, ) from yolink.device import YoLinkDevice @@ -44,6 +45,7 @@ SENSOR_DEVICE_TYPE = [ ATTR_DEVICE_LEAK_SENSOR, ATTR_DEVICE_VIBRATION_SENSOR, ATTR_DEVICE_CO_SMOKE_SENSOR, + ATTR_DEVICE_WATER_METER_CONTROLLER, ] @@ -84,6 +86,15 @@ SENSOR_TYPES: tuple[YoLinkBinarySensorEntityDescription, ...] 
= ( value=lambda state: state.get("smokeAlarm"), exists_fn=lambda device: device.device_type == ATTR_DEVICE_CO_SMOKE_SENSOR, ), + YoLinkBinarySensorEntityDescription( + key="pipe_leak_detected", + state_key="alarm", + device_class=BinarySensorDeviceClass.MOISTURE, + value=lambda state: state.get("leak") if state is not None else None, + exists_fn=lambda device: ( + device.device_type == ATTR_DEVICE_WATER_METER_CONTROLLER + ), + ), ) From 59d8c79371b4b7029e7faf64d53934c32e53e099 Mon Sep 17 00:00:00 2001 From: Duco Sebel <74970928+DCSBL@users.noreply.github.com> Date: Mon, 23 Dec 2024 10:27:53 +0100 Subject: [PATCH 669/677] Use user defined charge limit for charge limit range in Peblar (#133868) --- homeassistant/components/peblar/number.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/peblar/number.py b/homeassistant/components/peblar/number.py index d2983438a91..1a7cec43295 100644 --- a/homeassistant/components/peblar/number.py +++ b/homeassistant/components/peblar/number.py @@ -46,7 +46,7 @@ DESCRIPTIONS = [ entity_category=EntityCategory.CONFIG, native_step=1, native_min_value=6, - native_max_value_fn=lambda x: x.system_information.hardware_max_current, + native_max_value_fn=lambda x: x.user_configuration_coordinator.data.user_defined_charge_limit_current, native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, set_value_fn=lambda x, v: x.ev_interface(charge_current_limit=int(v) * 1000), value_fn=lambda x: round(x.ev.charge_current_limit / 1000), From 83f5ca5a303178b5ba2f08b48195c5fcd56f2c2f Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Mon, 23 Dec 2024 11:10:10 +0100 Subject: [PATCH 670/677] Add actions with response values to Music Assistant (#133521) Co-authored-by: Franck Nijhof Co-authored-by: OzGav Co-authored-by: Joost Lekkerkerker --- .../components/music_assistant/__init__.py | 12 + .../components/music_assistant/actions.py | 212 ++++++++++++++++++ .../components/music_assistant/const.py | 50 
+++++ .../components/music_assistant/icons.json | 5 +- .../music_assistant/media_player.py | 73 ++++-- .../components/music_assistant/schemas.py | 182 +++++++++++++++ .../components/music_assistant/services.yaml | 143 ++++++++++++ .../components/music_assistant/strings.json | 111 +++++++++ tests/components/music_assistant/common.py | 3 +- .../snapshots/test_actions.ambr | 202 +++++++++++++++++ .../snapshots/test_media_player.ambr | 85 +++++++ .../music_assistant/test_actions.py | 68 ++++++ .../music_assistant/test_media_player.py | 24 ++ 13 files changed, 1155 insertions(+), 15 deletions(-) create mode 100644 homeassistant/components/music_assistant/actions.py create mode 100644 homeassistant/components/music_assistant/schemas.py create mode 100644 tests/components/music_assistant/snapshots/test_actions.ambr create mode 100644 tests/components/music_assistant/test_actions.py diff --git a/homeassistant/components/music_assistant/__init__.py b/homeassistant/components/music_assistant/__init__.py index 22de510ebe3..052f4f556c1 100644 --- a/homeassistant/components/music_assistant/__init__.py +++ b/homeassistant/components/music_assistant/__init__.py @@ -17,22 +17,28 @@ from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_get_clientsession +import homeassistant.helpers.config_validation as cv from homeassistant.helpers.issue_registry import ( IssueSeverity, async_create_issue, async_delete_issue, ) +from .actions import register_actions from .const import DOMAIN, LOGGER if TYPE_CHECKING: from music_assistant_models.event import MassEvent + from homeassistant.helpers.typing import ConfigType + PLATFORMS = [Platform.MEDIA_PLAYER] CONNECT_TIMEOUT = 10 LISTEN_READY_TIMEOUT = 30 +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + type MusicAssistantConfigEntry = ConfigEntry[MusicAssistantEntryData] 
@@ -44,6 +50,12 @@ class MusicAssistantEntryData: listen_task: asyncio.Task +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the Music Assistant component.""" + register_actions(hass) + return True + + async def async_setup_entry( hass: HomeAssistant, entry: MusicAssistantConfigEntry ) -> bool: diff --git a/homeassistant/components/music_assistant/actions.py b/homeassistant/components/music_assistant/actions.py new file mode 100644 index 00000000000..f3297bf0a6f --- /dev/null +++ b/homeassistant/components/music_assistant/actions.py @@ -0,0 +1,212 @@ +"""Custom actions (previously known as services) for the Music Assistant integration.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from music_assistant_models.enums import MediaType +import voluptuous as vol + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, + callback, +) +from homeassistant.exceptions import ServiceValidationError +import homeassistant.helpers.config_validation as cv + +from .const import ( + ATTR_ALBUM_ARTISTS_ONLY, + ATTR_ALBUM_TYPE, + ATTR_ALBUMS, + ATTR_ARTISTS, + ATTR_CONFIG_ENTRY_ID, + ATTR_FAVORITE, + ATTR_ITEMS, + ATTR_LIBRARY_ONLY, + ATTR_LIMIT, + ATTR_MEDIA_TYPE, + ATTR_OFFSET, + ATTR_ORDER_BY, + ATTR_PLAYLISTS, + ATTR_RADIO, + ATTR_SEARCH, + ATTR_SEARCH_ALBUM, + ATTR_SEARCH_ARTIST, + ATTR_SEARCH_NAME, + ATTR_TRACKS, + DOMAIN, +) +from .schemas import ( + LIBRARY_RESULTS_SCHEMA, + SEARCH_RESULT_SCHEMA, + media_item_dict_from_mass_item, +) + +if TYPE_CHECKING: + from music_assistant_client import MusicAssistantClient + + from . 
import MusicAssistantConfigEntry + +SERVICE_SEARCH = "search" +SERVICE_GET_LIBRARY = "get_library" +DEFAULT_OFFSET = 0 +DEFAULT_LIMIT = 25 +DEFAULT_SORT_ORDER = "name" + + +@callback +def get_music_assistant_client( + hass: HomeAssistant, config_entry_id: str +) -> MusicAssistantClient: + """Get the Music Assistant client for the given config entry.""" + entry: MusicAssistantConfigEntry | None + if not (entry := hass.config_entries.async_get_entry(config_entry_id)): + raise ServiceValidationError("Entry not found") + if entry.state is not ConfigEntryState.LOADED: + raise ServiceValidationError("Entry not loaded") + return entry.runtime_data.mass + + +@callback +def register_actions(hass: HomeAssistant) -> None: + """Register custom actions.""" + hass.services.async_register( + DOMAIN, + SERVICE_SEARCH, + handle_search, + schema=vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY_ID): str, + vol.Required(ATTR_SEARCH_NAME): cv.string, + vol.Optional(ATTR_MEDIA_TYPE): vol.All( + cv.ensure_list, [vol.Coerce(MediaType)] + ), + vol.Optional(ATTR_SEARCH_ARTIST): cv.string, + vol.Optional(ATTR_SEARCH_ALBUM): cv.string, + vol.Optional(ATTR_LIMIT, default=5): vol.Coerce(int), + vol.Optional(ATTR_LIBRARY_ONLY, default=False): cv.boolean, + } + ), + supports_response=SupportsResponse.ONLY, + ) + hass.services.async_register( + DOMAIN, + SERVICE_GET_LIBRARY, + handle_get_library, + schema=vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY_ID): str, + vol.Required(ATTR_MEDIA_TYPE): vol.Coerce(MediaType), + vol.Optional(ATTR_FAVORITE): cv.boolean, + vol.Optional(ATTR_SEARCH): cv.string, + vol.Optional(ATTR_LIMIT): cv.positive_int, + vol.Optional(ATTR_OFFSET): int, + vol.Optional(ATTR_ORDER_BY): cv.string, + vol.Optional(ATTR_ALBUM_TYPE): list[MediaType], + vol.Optional(ATTR_ALBUM_ARTISTS_ONLY): cv.boolean, + } + ), + supports_response=SupportsResponse.ONLY, + ) + + +async def handle_search(call: ServiceCall) -> ServiceResponse: + """Handle queue_command action.""" + mass = 
get_music_assistant_client(call.hass, call.data[ATTR_CONFIG_ENTRY_ID]) + search_name = call.data[ATTR_SEARCH_NAME] + search_artist = call.data.get(ATTR_SEARCH_ARTIST) + search_album = call.data.get(ATTR_SEARCH_ALBUM) + if search_album and search_artist: + search_name = f"{search_artist} - {search_album} - {search_name}" + elif search_album: + search_name = f"{search_album} - {search_name}" + elif search_artist: + search_name = f"{search_artist} - {search_name}" + search_results = await mass.music.search( + search_query=search_name, + media_types=call.data.get(ATTR_MEDIA_TYPE, MediaType.ALL), + limit=call.data[ATTR_LIMIT], + library_only=call.data[ATTR_LIBRARY_ONLY], + ) + response: ServiceResponse = SEARCH_RESULT_SCHEMA( + { + ATTR_ARTISTS: [ + media_item_dict_from_mass_item(mass, item) + for item in search_results.artists + ], + ATTR_ALBUMS: [ + media_item_dict_from_mass_item(mass, item) + for item in search_results.albums + ], + ATTR_TRACKS: [ + media_item_dict_from_mass_item(mass, item) + for item in search_results.tracks + ], + ATTR_PLAYLISTS: [ + media_item_dict_from_mass_item(mass, item) + for item in search_results.playlists + ], + ATTR_RADIO: [ + media_item_dict_from_mass_item(mass, item) + for item in search_results.radio + ], + } + ) + return response + + +async def handle_get_library(call: ServiceCall) -> ServiceResponse: + """Handle get_library action.""" + mass = get_music_assistant_client(call.hass, call.data[ATTR_CONFIG_ENTRY_ID]) + media_type = call.data[ATTR_MEDIA_TYPE] + limit = call.data.get(ATTR_LIMIT, DEFAULT_LIMIT) + offset = call.data.get(ATTR_OFFSET, DEFAULT_OFFSET) + order_by = call.data.get(ATTR_ORDER_BY, DEFAULT_SORT_ORDER) + base_params = { + "favorite": call.data.get(ATTR_FAVORITE), + "search": call.data.get(ATTR_SEARCH), + "limit": limit, + "offset": offset, + "order_by": order_by, + } + if media_type == MediaType.ALBUM: + library_result = await mass.music.get_library_albums( + **base_params, + 
album_types=call.data.get(ATTR_ALBUM_TYPE), + ) + elif media_type == MediaType.ARTIST: + library_result = await mass.music.get_library_artists( + **base_params, + album_artists_only=call.data.get(ATTR_ALBUM_ARTISTS_ONLY), + ) + elif media_type == MediaType.TRACK: + library_result = await mass.music.get_library_tracks( + **base_params, + ) + elif media_type == MediaType.RADIO: + library_result = await mass.music.get_library_radios( + **base_params, + ) + elif media_type == MediaType.PLAYLIST: + library_result = await mass.music.get_library_playlists( + **base_params, + ) + else: + raise ServiceValidationError(f"Unsupported media type {media_type}") + + response: ServiceResponse = LIBRARY_RESULTS_SCHEMA( + { + ATTR_ITEMS: [ + media_item_dict_from_mass_item(mass, item) for item in library_result + ], + ATTR_LIMIT: limit, + ATTR_OFFSET: offset, + ATTR_ORDER_BY: order_by, + ATTR_MEDIA_TYPE: media_type, + } + ) + return response diff --git a/homeassistant/components/music_assistant/const.py b/homeassistant/components/music_assistant/const.py index 6512f58b96c..1980c495278 100644 --- a/homeassistant/components/music_assistant/const.py +++ b/homeassistant/components/music_assistant/const.py @@ -14,5 +14,55 @@ ATTR_GROUP_PARENTS = "group_parents" ATTR_MASS_PLAYER_TYPE = "mass_player_type" ATTR_ACTIVE_QUEUE = "active_queue" ATTR_STREAM_TITLE = "stream_title" +ATTR_MEDIA_TYPE = "media_type" +ATTR_SEARCH_NAME = "name" +ATTR_SEARCH_ARTIST = "artist" +ATTR_SEARCH_ALBUM = "album" +ATTR_LIMIT = "limit" +ATTR_LIBRARY_ONLY = "library_only" +ATTR_FAVORITE = "favorite" +ATTR_SEARCH = "search" +ATTR_OFFSET = "offset" +ATTR_ORDER_BY = "order_by" +ATTR_ALBUM_TYPE = "album_type" +ATTR_ALBUM_ARTISTS_ONLY = "album_artists_only" +ATTR_CONFIG_ENTRY_ID = "config_entry_id" +ATTR_URI = "uri" +ATTR_IMAGE = "image" +ATTR_VERSION = "version" +ATTR_ARTISTS = "artists" +ATTR_ALBUMS = "albums" +ATTR_TRACKS = "tracks" +ATTR_PLAYLISTS = "playlists" +ATTR_RADIO = "radio" +ATTR_ITEMS = "items" 
+ATTR_RADIO_MODE = "radio_mode" +ATTR_MEDIA_ID = "media_id" +ATTR_ARTIST = "artist" +ATTR_ALBUM = "album" +ATTR_URL = "url" +ATTR_USE_PRE_ANNOUNCE = "use_pre_announce" +ATTR_ANNOUNCE_VOLUME = "announce_volume" +ATTR_SOURCE_PLAYER = "source_player" +ATTR_AUTO_PLAY = "auto_play" +ATTR_QUEUE_ID = "queue_id" +ATTR_ACTIVE = "active" +ATTR_SHUFFLE_ENABLED = "shuffle_enabled" +ATTR_REPEAT_MODE = "repeat_mode" +ATTR_CURRENT_INDEX = "current_index" +ATTR_ELAPSED_TIME = "elapsed_time" +ATTR_CURRENT_ITEM = "current_item" +ATTR_NEXT_ITEM = "next_item" +ATTR_QUEUE_ITEM_ID = "queue_item_id" +ATTR_DURATION = "duration" +ATTR_MEDIA_ITEM = "media_item" +ATTR_STREAM_DETAILS = "stream_details" +ATTR_CONTENT_TYPE = "content_type" +ATTR_SAMPLE_RATE = "sample_rate" +ATTR_BIT_DEPTH = "bit_depth" +ATTR_STREAM_TITLE = "stream_title" +ATTR_PROVIDER = "provider" +ATTR_ITEM_ID = "item_id" + LOGGER = logging.getLogger(__package__) diff --git a/homeassistant/components/music_assistant/icons.json b/homeassistant/components/music_assistant/icons.json index 7533dbb6dad..0fa64b8d273 100644 --- a/homeassistant/components/music_assistant/icons.json +++ b/homeassistant/components/music_assistant/icons.json @@ -2,6 +2,9 @@ "services": { "play_media": { "service": "mdi:play" }, "play_announcement": { "service": "mdi:bullhorn" }, - "transfer_queue": { "service": "mdi:transfer" } + "transfer_queue": { "service": "mdi:transfer" }, + "search": { "service": "mdi:magnify" }, + "get_queue": { "service": "mdi:playlist-music" }, + "get_library": { "service": "mdi:music-box-multiple" } } } diff --git a/homeassistant/components/music_assistant/media_player.py b/homeassistant/components/music_assistant/media_player.py index 7004f09aad5..9aa7498a2ee 100644 --- a/homeassistant/components/music_assistant/media_player.py +++ b/homeassistant/components/music_assistant/media_player.py @@ -36,8 +36,8 @@ from homeassistant.components.media_player import ( RepeatMode, async_process_play_media_url, ) -from 
homeassistant.const import STATE_OFF -from homeassistant.core import HomeAssistant +from homeassistant.const import ATTR_NAME, STATE_OFF +from homeassistant.core import HomeAssistant, ServiceResponse, SupportsResponse from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er import homeassistant.helpers.config_validation as cv @@ -48,9 +48,33 @@ from homeassistant.helpers.entity_platform import ( from homeassistant.util.dt import utc_from_timestamp from . import MusicAssistantConfigEntry -from .const import ATTR_ACTIVE_QUEUE, ATTR_MASS_PLAYER_TYPE, DOMAIN +from .const import ( + ATTR_ACTIVE, + ATTR_ACTIVE_QUEUE, + ATTR_ALBUM, + ATTR_ANNOUNCE_VOLUME, + ATTR_ARTIST, + ATTR_AUTO_PLAY, + ATTR_CURRENT_INDEX, + ATTR_CURRENT_ITEM, + ATTR_ELAPSED_TIME, + ATTR_ITEMS, + ATTR_MASS_PLAYER_TYPE, + ATTR_MEDIA_ID, + ATTR_MEDIA_TYPE, + ATTR_NEXT_ITEM, + ATTR_QUEUE_ID, + ATTR_RADIO_MODE, + ATTR_REPEAT_MODE, + ATTR_SHUFFLE_ENABLED, + ATTR_SOURCE_PLAYER, + ATTR_URL, + ATTR_USE_PRE_ANNOUNCE, + DOMAIN, +) from .entity import MusicAssistantEntity from .media_browser import async_browse_media +from .schemas import QUEUE_DETAILS_SCHEMA, queue_item_dict_from_mass_item if TYPE_CHECKING: from music_assistant_client import MusicAssistantClient @@ -89,16 +113,7 @@ QUEUE_OPTION_MAP = { SERVICE_PLAY_MEDIA_ADVANCED = "play_media" SERVICE_PLAY_ANNOUNCEMENT = "play_announcement" SERVICE_TRANSFER_QUEUE = "transfer_queue" -ATTR_RADIO_MODE = "radio_mode" -ATTR_MEDIA_ID = "media_id" -ATTR_MEDIA_TYPE = "media_type" -ATTR_ARTIST = "artist" -ATTR_ALBUM = "album" -ATTR_URL = "url" -ATTR_USE_PRE_ANNOUNCE = "use_pre_announce" -ATTR_ANNOUNCE_VOLUME = "announce_volume" -ATTR_SOURCE_PLAYER = "source_player" -ATTR_AUTO_PLAY = "auto_play" +SERVICE_GET_QUEUE = "get_queue" def catch_musicassistant_error[_R, **P]( @@ -179,6 +194,12 @@ async def async_setup_entry( }, "_async_handle_transfer_queue", ) + platform.async_register_entity_service( + SERVICE_GET_QUEUE, + 
schema=None, + func="_async_handle_get_queue", + supports_response=SupportsResponse.ONLY, + ) class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity): @@ -513,6 +534,32 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity): source_queue_id, target_queue_id, auto_play ) + @catch_musicassistant_error + async def _async_handle_get_queue(self) -> ServiceResponse: + """Handle get_queue action.""" + if not self.active_queue: + raise HomeAssistantError("No active queue found") + active_queue = self.active_queue + response: ServiceResponse = QUEUE_DETAILS_SCHEMA( + { + ATTR_QUEUE_ID: active_queue.queue_id, + ATTR_ACTIVE: active_queue.active, + ATTR_NAME: active_queue.display_name, + ATTR_ITEMS: active_queue.items, + ATTR_SHUFFLE_ENABLED: active_queue.shuffle_enabled, + ATTR_REPEAT_MODE: active_queue.repeat_mode.value, + ATTR_CURRENT_INDEX: active_queue.current_index, + ATTR_ELAPSED_TIME: active_queue.corrected_elapsed_time, + ATTR_CURRENT_ITEM: queue_item_dict_from_mass_item( + self.mass, active_queue.current_item + ), + ATTR_NEXT_ITEM: queue_item_dict_from_mass_item( + self.mass, active_queue.next_item + ), + } + ) + return response + async def async_browse_media( self, media_content_type: MediaType | str | None = None, diff --git a/homeassistant/components/music_assistant/schemas.py b/homeassistant/components/music_assistant/schemas.py new file mode 100644 index 00000000000..9caae2ee0b4 --- /dev/null +++ b/homeassistant/components/music_assistant/schemas.py @@ -0,0 +1,182 @@ +"""Voluptuous schemas for Music Assistant integration service responses.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from music_assistant_models.enums import MediaType +import voluptuous as vol + +from homeassistant.const import ATTR_NAME +import homeassistant.helpers.config_validation as cv + +from .const import ( + ATTR_ACTIVE, + ATTR_ALBUM, + ATTR_ALBUMS, + ATTR_ARTISTS, + ATTR_BIT_DEPTH, + ATTR_CONTENT_TYPE, + 
ATTR_CURRENT_INDEX, + ATTR_CURRENT_ITEM, + ATTR_DURATION, + ATTR_ELAPSED_TIME, + ATTR_IMAGE, + ATTR_ITEM_ID, + ATTR_ITEMS, + ATTR_LIMIT, + ATTR_MEDIA_ITEM, + ATTR_MEDIA_TYPE, + ATTR_NEXT_ITEM, + ATTR_OFFSET, + ATTR_ORDER_BY, + ATTR_PLAYLISTS, + ATTR_PROVIDER, + ATTR_QUEUE_ID, + ATTR_QUEUE_ITEM_ID, + ATTR_RADIO, + ATTR_REPEAT_MODE, + ATTR_SAMPLE_RATE, + ATTR_SHUFFLE_ENABLED, + ATTR_STREAM_DETAILS, + ATTR_STREAM_TITLE, + ATTR_TRACKS, + ATTR_URI, + ATTR_VERSION, +) + +if TYPE_CHECKING: + from music_assistant_client import MusicAssistantClient + from music_assistant_models.media_items import ItemMapping, MediaItemType + from music_assistant_models.queue_item import QueueItem + +MEDIA_ITEM_SCHEMA = vol.Schema( + { + vol.Required(ATTR_MEDIA_TYPE): vol.Coerce(MediaType), + vol.Required(ATTR_URI): cv.string, + vol.Required(ATTR_NAME): cv.string, + vol.Required(ATTR_VERSION): cv.string, + vol.Optional(ATTR_IMAGE, default=None): vol.Any(None, cv.string), + vol.Optional(ATTR_ARTISTS): [vol.Self], + vol.Optional(ATTR_ALBUM): vol.Self, + } +) + + +def media_item_dict_from_mass_item( + mass: MusicAssistantClient, + item: MediaItemType | ItemMapping | None, +) -> dict[str, Any] | None: + """Parse a Music Assistant MediaItem.""" + if not item: + return None + base = { + ATTR_MEDIA_TYPE: item.media_type, + ATTR_URI: item.uri, + ATTR_NAME: item.name, + ATTR_VERSION: item.version, + ATTR_IMAGE: mass.get_media_item_image_url(item), + } + if artists := getattr(item, "artists", None): + base[ATTR_ARTISTS] = [media_item_dict_from_mass_item(mass, x) for x in artists] + if album := getattr(item, "album", None): + base[ATTR_ALBUM] = media_item_dict_from_mass_item(mass, album) + return base + + +SEARCH_RESULT_SCHEMA = vol.Schema( + { + vol.Required(ATTR_ARTISTS): vol.All( + cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)] + ), + vol.Required(ATTR_ALBUMS): vol.All( + cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)] + ), + vol.Required(ATTR_TRACKS): vol.All( + cv.ensure_list, 
[vol.Schema(MEDIA_ITEM_SCHEMA)] + ), + vol.Required(ATTR_PLAYLISTS): vol.All( + cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)] + ), + vol.Required(ATTR_RADIO): vol.All( + cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)] + ), + }, +) + +LIBRARY_RESULTS_SCHEMA = vol.Schema( + { + vol.Required(ATTR_ITEMS): vol.All( + cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)] + ), + vol.Required(ATTR_LIMIT): int, + vol.Required(ATTR_OFFSET): int, + vol.Required(ATTR_ORDER_BY): str, + vol.Required(ATTR_MEDIA_TYPE): vol.Coerce(MediaType), + } +) + +AUDIO_FORMAT_SCHEMA = vol.Schema( + { + vol.Required(ATTR_CONTENT_TYPE): str, + vol.Required(ATTR_SAMPLE_RATE): int, + vol.Required(ATTR_BIT_DEPTH): int, + vol.Required(ATTR_PROVIDER): str, + vol.Required(ATTR_ITEM_ID): str, + } +) + +QUEUE_ITEM_SCHEMA = vol.Schema( + { + vol.Required(ATTR_QUEUE_ITEM_ID): cv.string, + vol.Required(ATTR_NAME): cv.string, + vol.Optional(ATTR_DURATION, default=None): vol.Any(None, int), + vol.Optional(ATTR_MEDIA_ITEM, default=None): vol.Any( + None, vol.Schema(MEDIA_ITEM_SCHEMA) + ), + vol.Optional(ATTR_STREAM_DETAILS): vol.Schema(AUDIO_FORMAT_SCHEMA), + vol.Optional(ATTR_STREAM_TITLE, default=None): vol.Any(None, cv.string), + } +) + + +def queue_item_dict_from_mass_item( + mass: MusicAssistantClient, + item: QueueItem | None, +) -> dict[str, Any] | None: + """Parse a Music Assistant QueueItem.""" + if not item: + return None + base = { + ATTR_QUEUE_ITEM_ID: item.queue_item_id, + ATTR_NAME: item.name, + ATTR_DURATION: item.duration, + ATTR_MEDIA_ITEM: media_item_dict_from_mass_item(mass, item.media_item), + } + if streamdetails := item.streamdetails: + base[ATTR_STREAM_TITLE] = streamdetails.stream_title + base[ATTR_STREAM_DETAILS] = { + ATTR_CONTENT_TYPE: streamdetails.audio_format.content_type.value, + ATTR_SAMPLE_RATE: streamdetails.audio_format.sample_rate, + ATTR_BIT_DEPTH: streamdetails.audio_format.bit_depth, + ATTR_PROVIDER: streamdetails.provider, + ATTR_ITEM_ID: streamdetails.item_id, + } + + 
return base + + +QUEUE_DETAILS_SCHEMA = vol.Schema( + { + vol.Required(ATTR_QUEUE_ID): str, + vol.Required(ATTR_ACTIVE): bool, + vol.Required(ATTR_NAME): str, + vol.Required(ATTR_ITEMS): int, + vol.Required(ATTR_SHUFFLE_ENABLED): bool, + vol.Required(ATTR_REPEAT_MODE): str, + vol.Required(ATTR_CURRENT_INDEX): vol.Any(None, int), + vol.Required(ATTR_ELAPSED_TIME): vol.Coerce(int), + vol.Required(ATTR_CURRENT_ITEM): vol.Any(None, QUEUE_ITEM_SCHEMA), + vol.Required(ATTR_NEXT_ITEM): vol.Any(None, QUEUE_ITEM_SCHEMA), + } +) diff --git a/homeassistant/components/music_assistant/services.yaml b/homeassistant/components/music_assistant/services.yaml index 00f895c4ef6..73e8e2d7521 100644 --- a/homeassistant/components/music_assistant/services.yaml +++ b/homeassistant/components/music_assistant/services.yaml @@ -88,3 +88,146 @@ transfer_queue: example: "true" selector: boolean: + +get_queue: + target: + entity: + domain: media_player + integration: music_assistant + supported_features: + - media_player.MediaPlayerEntityFeature.PLAY_MEDIA + +search: + fields: + config_entry_id: + required: true + selector: + config_entry: + integration: music_assistant + name: + required: true + example: "We Are The Champions" + selector: + text: + media_type: + example: "playlist" + selector: + select: + multiple: true + translation_key: media_type + options: + - artist + - album + - playlist + - track + - radio + artist: + example: "Queen" + selector: + text: + album: + example: "News of the world" + selector: + text: + limit: + advanced: true + example: 25 + default: 5 + selector: + number: + min: 1 + max: 100 + step: 1 + library_only: + example: "true" + default: false + selector: + boolean: + +get_library: + fields: + config_entry_id: + required: true + selector: + config_entry: + integration: music_assistant + media_type: + required: true + example: "playlist" + selector: + select: + translation_key: media_type + options: + - artist + - album + - playlist + - track + - radio + favorite: 
+ example: "true" + default: false + selector: + boolean: + search: + example: "We Are The Champions" + selector: + text: + limit: + advanced: true + example: 25 + default: 25 + selector: + number: + min: 1 + max: 500 + step: 1 + offset: + advanced: true + example: 25 + default: 0 + selector: + number: + min: 1 + max: 1000000 + step: 1 + order_by: + example: "random" + selector: + select: + translation_key: order_by + options: + - name + - name_desc + - sort_name + - sort_name_desc + - timestamp_added + - timestamp_added_desc + - last_played + - last_played_desc + - play_count + - play_count_desc + - year + - year_desc + - position + - position_desc + - artist_name + - artist_name_desc + - random + - random_play_count + album_type: + example: "single" + selector: + select: + multiple: true + translation_key: album_type + options: + - album + - single + - compilation + - ep + - unknown + album_artists_only: + example: "true" + default: false + selector: + boolean: diff --git a/homeassistant/components/music_assistant/strings.json b/homeassistant/components/music_assistant/strings.json index cce7f9607c2..af366c94310 100644 --- a/homeassistant/components/music_assistant/strings.json +++ b/homeassistant/components/music_assistant/strings.json @@ -99,6 +99,86 @@ "description": "Start playing the queue on the target player. Omit to use the default behavior." } } + }, + "get_queue": { + "name": "Get playerQueue details (advanced)", + "description": "Get the details of the currently active queue of a Music Assistant player." + }, + "search": { + "name": "Search Music Assistant", + "description": "Perform a global search on the Music Assistant library and all providers.", + "fields": { + "config_entry_id": { + "name": "Music Assistant instance", + "description": "Select the Music Assistant instance to perform the search on." + }, + "name": { + "name": "Search name", + "description": "The name/title to search for." 
+ }, + "media_type": { + "name": "Media type(s)", + "description": "The type of the content to search. Such as artist, album, track, radio, or playlist. All types if omitted." + }, + "artist": { + "name": "Artist name", + "description": "When specifying a track or album name in the name field, you can optionally restrict results by this artist name." + }, + "album": { + "name": "Album name", + "description": "When specifying a track name in the name field, you can optionally restrict results by this album name." + }, + "limit": { + "name": "Limit", + "description": "Maximum number of items to return (per media type)." + }, + "library_only": { + "name": "Only library items", + "description": "Only include results that are in the library." + } + } + }, + "get_library": { + "name": "Get Library items", + "description": "Get items from a Music Assistant library.", + "fields": { + "config_entry_id": { + "name": "[%key:component::music_assistant::services::search::fields::config_entry_id::name%]", + "description": "[%key:component::music_assistant::services::search::fields::config_entry_id::description%]" + }, + "media_type": { + "name": "Media type", + "description": "The media type for which to request details for." + }, + "favorite": { + "name": "Favorites only", + "description": "Filter items so only favorites items are returned." + }, + "search": { + "name": "Search", + "description": "Optional search string to search through this library." + }, + "limit": { + "name": "Limit", + "description": "Maximum number of items to return." + }, + "offset": { + "name": "Offset", + "description": "Offset to start the list from." + }, + "order_by": { + "name": "Order By", + "description": "Sort the list by this field." + }, + "album_type": { + "name": "Album type filter (albums library only)", + "description": "Filter albums by type." 
+ }, + "album_artists_only": { + "name": "Enable album artists filter (only for artist library)", + "description": "Only return Album Artists when listing the Artists library items." + } + } } }, "selector": { @@ -119,6 +199,37 @@ "playlist": "Playlist", "radio": "Radio" } + }, + "order_by": { + "options": { + "name": "Name", + "name_desc": "Name (desc)", + "sort_name": "Sort name", + "sort_name_desc": "Sort name (desc)", + "timestamp_added": "Added", + "timestamp_added_desc": "Added (desc)", + "last_played": "Last played", + "last_played_desc": "Last played (desc)", + "play_count": "Play count", + "play_count_desc": "Play count (desc)", + "year": "Year", + "year_desc": "Year (desc)", + "position": "Position", + "position_desc": "Position (desc)", + "artist_name": "Artist name", + "artist_name_desc": "Artist name (desc)", + "random": "Random", + "random_play_count": "Random + least played" + } + }, + "album_type": { + "options": { + "album": "Album", + "single": "Single", + "ep": "EP", + "compilation": "Compilation", + "unknown": "Unknown" + } } } } diff --git a/tests/components/music_assistant/common.py b/tests/components/music_assistant/common.py index c8293b5622f..7c0f9df751a 100644 --- a/tests/components/music_assistant/common.py +++ b/tests/components/music_assistant/common.py @@ -30,7 +30,7 @@ def load_and_parse_fixture(fixture: str) -> dict[str, Any]: async def setup_integration_from_fixtures( hass: HomeAssistant, music_assistant_client: MagicMock, -) -> None: +) -> MockConfigEntry: """Set up MusicAssistant integration with fixture data.""" players = create_players_from_fixture() music_assistant_client.players._players = {x.player_id: x for x in players} @@ -65,6 +65,7 @@ async def setup_integration_from_fixtures( config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() + return config_entry def create_players_from_fixture() -> list[Player]: diff --git 
a/tests/components/music_assistant/snapshots/test_actions.ambr b/tests/components/music_assistant/snapshots/test_actions.ambr new file mode 100644 index 00000000000..6c30ffc512c --- /dev/null +++ b/tests/components/music_assistant/snapshots/test_actions.ambr @@ -0,0 +1,202 @@ +# serializer version: 1 +# name: test_get_library_action + dict({ + 'items': list([ + dict({ + 'album': dict({ + 'image': None, + 'media_type': , + 'name': 'Traveller', + 'uri': 'library://album/463', + 'version': '', + }), + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': 'Chris Stapleton', + 'uri': 'library://artist/433', + 'version': '', + }), + ]), + 'image': None, + 'media_type': , + 'name': 'Tennessee Whiskey', + 'uri': 'library://track/456', + 'version': '', + }), + dict({ + 'album': dict({ + 'image': None, + 'media_type': , + 'name': 'Thelma + Louise', + 'uri': 'library://album/471', + 'version': '', + }), + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': 'Bastille', + 'uri': 'library://artist/81', + 'version': '', + }), + ]), + 'image': None, + 'media_type': , + 'name': 'Thelma + Louise', + 'uri': 'library://track/467', + 'version': '', + }), + dict({ + 'album': dict({ + 'image': None, + 'media_type': , + 'name': 'HIStory - PAST, PRESENT AND FUTURE - BOOK I', + 'uri': 'library://album/486', + 'version': '', + }), + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': 'Michael Jackson', + 'uri': 'library://artist/30', + 'version': '', + }), + ]), + 'image': None, + 'media_type': , + 'name': "They Don't Care About Us", + 'uri': 'library://track/485', + 'version': '', + }), + dict({ + 'album': dict({ + 'image': None, + 'media_type': , + 'name': 'Better Dayz', + 'uri': 'library://album/487', + 'version': '', + }), + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': '2Pac', + 'uri': 'library://artist/159', + 'version': '', + }), + dict({ + 'image': None, + 'media_type': , + 'name': 'The Outlawz', + 
'uri': 'library://artist/451', + 'version': '', + }), + ]), + 'image': None, + 'media_type': , + 'name': "They Don't Give A F**** About Us", + 'uri': 'library://track/486', + 'version': '', + }), + dict({ + 'album': dict({ + 'image': None, + 'media_type': , + 'name': 'Things We Lost In The Fire', + 'uri': 'library://album/488', + 'version': '', + }), + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': 'Bastille', + 'uri': 'library://artist/81', + 'version': '', + }), + ]), + 'image': None, + 'media_type': , + 'name': 'Things We Lost In The Fire', + 'uri': 'library://track/487', + 'version': 'TORN Remix', + }), + dict({ + 'album': dict({ + 'image': None, + 'media_type': , + 'name': 'Doom Days', + 'uri': 'library://album/489', + 'version': '', + }), + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': 'Bastille', + 'uri': 'library://artist/81', + 'version': '', + }), + ]), + 'image': None, + 'media_type': , + 'name': 'Those Nights', + 'uri': 'library://track/488', + 'version': '', + }), + ]), + 'limit': 25, + 'media_type': , + 'offset': 0, + 'order_by': 'name', + }) +# --- +# name: test_search_action + dict({ + 'albums': list([ + dict({ + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': 'A Space Love Adventure', + 'uri': 'library://artist/289', + 'version': '', + }), + ]), + 'image': None, + 'media_type': , + 'name': 'Synth Punk EP', + 'uri': 'library://album/396', + 'version': '', + }), + dict({ + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': 'Various Artists', + 'uri': 'library://artist/96', + 'version': '', + }), + ]), + 'image': None, + 'media_type': , + 'name': 'Synthwave (The 80S Revival)', + 'uri': 'library://album/95', + 'version': 'The 80S Revival', + }), + ]), + 'artists': list([ + ]), + 'playlists': list([ + ]), + 'radio': list([ + ]), + 'tracks': list([ + ]), + }) +# --- diff --git a/tests/components/music_assistant/snapshots/test_media_player.ambr 
b/tests/components/music_assistant/snapshots/test_media_player.ambr index e3d7a4a0cbc..6c5389dbd6a 100644 --- a/tests/components/music_assistant/snapshots/test_media_player.ambr +++ b/tests/components/music_assistant/snapshots/test_media_player.ambr @@ -188,3 +188,88 @@ 'state': 'off', }) # --- +# name: test_media_player_get_queue_action + dict({ + 'media_player.test_group_player_1': dict({ + 'active': True, + 'current_index': 26, + 'current_item': dict({ + 'duration': 536, + 'media_item': dict({ + 'album': dict({ + 'image': None, + 'media_type': , + 'name': 'Use Your Illusion I', + 'uri': 'spotify://album/0CxPbTRARqKUYighiEY9Sz', + 'version': '', + }), + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': "Guns N' Roses", + 'uri': 'spotify://artist/3qm84nBOXUEQ2vnTfUTTFC', + 'version': '', + }), + ]), + 'image': None, + 'media_type': , + 'name': 'November Rain', + 'uri': 'spotify://track/3YRCqOhFifThpSRFJ1VWFM', + 'version': '', + }), + 'name': "Guns N' Roses - November Rain", + 'queue_item_id': '5d95dc5be77e4f7eb4939f62cfef527b', + 'stream_details': dict({ + 'bit_depth': 16, + 'content_type': 'ogg', + 'item_id': '3YRCqOhFifThpSRFJ1VWFM', + 'provider': 'spotify', + 'sample_rate': 44100, + }), + 'stream_title': None, + }), + 'items': 1094, + 'name': 'Test Group Player 1', + 'next_item': dict({ + 'duration': 207, + 'media_item': dict({ + 'album': dict({ + 'image': None, + 'media_type': , + 'name': 'La Folie', + 'uri': 'qobuz://album/0724353468859', + 'version': '', + }), + 'artists': list([ + dict({ + 'image': None, + 'media_type': , + 'name': 'The Stranglers', + 'uri': 'qobuz://artist/26779', + 'version': '', + }), + ]), + 'image': None, + 'media_type': , + 'name': 'Golden Brown', + 'uri': 'qobuz://track/1004735', + 'version': '', + }), + 'name': 'The Stranglers - Golden Brown', + 'queue_item_id': '990ae8f29cdf4fb588d679b115621f55', + 'stream_details': dict({ + 'bit_depth': 16, + 'content_type': 'flac', + 'item_id': '1004735', + 'provider': 
'qobuz', + 'sample_rate': 44100, + }), + 'stream_title': None, + }), + 'queue_id': 'test_group_player_1', + 'repeat_mode': 'all', + 'shuffle_enabled': True, + }), + }) +# --- diff --git a/tests/components/music_assistant/test_actions.py b/tests/components/music_assistant/test_actions.py new file mode 100644 index 00000000000..4d3917091c1 --- /dev/null +++ b/tests/components/music_assistant/test_actions.py @@ -0,0 +1,68 @@ +"""Test Music Assistant actions.""" + +from unittest.mock import AsyncMock, MagicMock + +from music_assistant_models.media_items import SearchResults +from syrupy import SnapshotAssertion + +from homeassistant.components.music_assistant.actions import ( + SERVICE_GET_LIBRARY, + SERVICE_SEARCH, +) +from homeassistant.components.music_assistant.const import ( + ATTR_CONFIG_ENTRY_ID, + ATTR_FAVORITE, + ATTR_MEDIA_TYPE, + ATTR_SEARCH_NAME, + DOMAIN as MASS_DOMAIN, +) +from homeassistant.core import HomeAssistant + +from .common import create_library_albums_from_fixture, setup_integration_from_fixtures + + +async def test_search_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test music assistant search action.""" + entry = await setup_integration_from_fixtures(hass, music_assistant_client) + + music_assistant_client.music.search = AsyncMock( + return_value=SearchResults( + albums=create_library_albums_from_fixture(), + ) + ) + response = await hass.services.async_call( + MASS_DOMAIN, + SERVICE_SEARCH, + { + ATTR_CONFIG_ENTRY_ID: entry.entry_id, + ATTR_SEARCH_NAME: "test", + }, + blocking=True, + return_response=True, + ) + assert response == snapshot + + +async def test_get_library_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test music assistant get_library action.""" + entry = await setup_integration_from_fixtures(hass, music_assistant_client) + response = await hass.services.async_call( + MASS_DOMAIN, + 
SERVICE_GET_LIBRARY, + { + ATTR_CONFIG_ENTRY_ID: entry.entry_id, + ATTR_FAVORITE: False, + ATTR_MEDIA_TYPE: "track", + }, + blocking=True, + return_response=True, + ) + assert response == snapshot diff --git a/tests/components/music_assistant/test_media_player.py b/tests/components/music_assistant/test_media_player.py index 13716b6a479..25dfcd22c72 100644 --- a/tests/components/music_assistant/test_media_player.py +++ b/tests/components/music_assistant/test_media_player.py @@ -6,6 +6,7 @@ from music_assistant_models.enums import MediaType, QueueOption from music_assistant_models.media_items import Track import pytest from syrupy import SnapshotAssertion +from syrupy.filters import paths from homeassistant.components.media_player import ( ATTR_GROUP_MEMBERS, @@ -32,6 +33,7 @@ from homeassistant.components.music_assistant.media_player import ( ATTR_SOURCE_PLAYER, ATTR_URL, ATTR_USE_PRE_ANNOUNCE, + SERVICE_GET_QUEUE, SERVICE_PLAY_ANNOUNCEMENT, SERVICE_PLAY_MEDIA_ADVANCED, SERVICE_TRANSFER_QUEUE, @@ -583,3 +585,25 @@ async def test_media_player_transfer_queue_action( auto_play=None, require_schema=25, ) + + +async def test_media_player_get_queue_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test media_player get_queue action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_group_player_1" + response = await hass.services.async_call( + MASS_DOMAIN, + SERVICE_GET_QUEUE, + { + ATTR_ENTITY_ID: entity_id, + }, + blocking=True, + return_response=True, + ) + # no call is made, this info comes from the cached queue data + assert music_assistant_client.send_command.call_count == 0 + assert response == snapshot(exclude=paths(f"{entity_id}.elapsed_time")) From ed7da35de4de633c5cdf1578e143dbaf9c06b492 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 23 Dec 2024 11:11:25 +0100 Subject: [PATCH 671/677] Add coordinator error handling for Peblar 
Rocksolid EV Chargers (#133809) --- .../components/peblar/coordinator.py | 84 +++++++++---- tests/components/peblar/test_coordinator.py | 119 ++++++++++++++++++ 2 files changed, 182 insertions(+), 21 deletions(-) create mode 100644 tests/components/peblar/test_coordinator.py diff --git a/homeassistant/components/peblar/coordinator.py b/homeassistant/components/peblar/coordinator.py index 4afc544cc1d..398788f1f9f 100644 --- a/homeassistant/components/peblar/coordinator.py +++ b/homeassistant/components/peblar/coordinator.py @@ -2,12 +2,16 @@ from __future__ import annotations +from collections.abc import Callable, Coroutine from dataclasses import dataclass from datetime import timedelta +from typing import Any, Concatenate from peblar import ( Peblar, PeblarApi, + PeblarAuthenticationError, + PeblarConnectionError, PeblarError, PeblarEVInterface, PeblarMeter, @@ -16,12 +20,13 @@ from peblar import ( PeblarVersions, ) -from homeassistant.config_entries import ConfigEntry +from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from tests.components.peblar.conftest import PeblarSystemInformation -from .const import LOGGER +from .const import DOMAIN, LOGGER @dataclass(kw_only=True) @@ -59,6 +64,49 @@ class PeblarData: system: PeblarSystem +def _coordinator_exception_handler[ + _DataUpdateCoordinatorT: PeblarDataUpdateCoordinator + | PeblarVersionDataUpdateCoordinator + | PeblarUserConfigurationDataUpdateCoordinator, + **_P, +]( + func: Callable[Concatenate[_DataUpdateCoordinatorT, _P], Coroutine[Any, Any, Any]], +) -> Callable[Concatenate[_DataUpdateCoordinatorT, _P], Coroutine[Any, Any, Any]]: + """Handle exceptions within the update handler of a coordinator.""" + + async def handler( + self: _DataUpdateCoordinatorT, *args: _P.args, **kwargs: _P.kwargs + ) -> 
Any: + try: + return await func(self, *args, **kwargs) + except PeblarAuthenticationError as error: + if self.config_entry and self.config_entry.state is ConfigEntryState.LOADED: + # This is not the first refresh, so let's reload + # the config entry to ensure we trigger a re-authentication + # flow (or recover in case of API token changes). + self.hass.config_entries.async_schedule_reload( + self.config_entry.entry_id + ) + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="authentication_error", + ) from error + except PeblarConnectionError as error: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="communication_error", + translation_placeholders={"error": str(error)}, + ) from error + except PeblarError as error: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="unknown_error", + translation_placeholders={"error": str(error)}, + ) from error + + return handler + + class PeblarVersionDataUpdateCoordinator( DataUpdateCoordinator[PeblarVersionInformation] ): @@ -77,15 +125,13 @@ class PeblarVersionDataUpdateCoordinator( update_interval=timedelta(hours=2), ) + @_coordinator_exception_handler async def _async_update_data(self) -> PeblarVersionInformation: """Fetch data from the Peblar device.""" - try: - return PeblarVersionInformation( - current=await self.peblar.current_versions(), - available=await self.peblar.available_versions(), - ) - except PeblarError as err: - raise UpdateFailed(err) from err + return PeblarVersionInformation( + current=await self.peblar.current_versions(), + available=await self.peblar.available_versions(), + ) class PeblarDataUpdateCoordinator(DataUpdateCoordinator[PeblarData]): @@ -104,16 +150,14 @@ class PeblarDataUpdateCoordinator(DataUpdateCoordinator[PeblarData]): update_interval=timedelta(seconds=10), ) + @_coordinator_exception_handler async def _async_update_data(self) -> PeblarData: """Fetch data from the Peblar device.""" - try: - return PeblarData( - ev=await 
self.api.ev_interface(), - meter=await self.api.meter(), - system=await self.api.system(), - ) - except PeblarError as err: - raise UpdateFailed(err) from err + return PeblarData( + ev=await self.api.ev_interface(), + meter=await self.api.meter(), + system=await self.api.system(), + ) class PeblarUserConfigurationDataUpdateCoordinator( @@ -134,9 +178,7 @@ class PeblarUserConfigurationDataUpdateCoordinator( update_interval=timedelta(minutes=5), ) + @_coordinator_exception_handler async def _async_update_data(self) -> PeblarUserConfiguration: """Fetch data from the Peblar device.""" - try: - return await self.peblar.user_configuration() - except PeblarError as err: - raise UpdateFailed(err) from err + return await self.peblar.user_configuration() diff --git a/tests/components/peblar/test_coordinator.py b/tests/components/peblar/test_coordinator.py new file mode 100644 index 00000000000..f438d807920 --- /dev/null +++ b/tests/components/peblar/test_coordinator.py @@ -0,0 +1,119 @@ +"""Tests for the Peblar coordinators.""" + +from datetime import timedelta +from unittest.mock import MagicMock + +from freezegun.api import FrozenDateTimeFactory +from peblar import PeblarAuthenticationError, PeblarConnectionError, PeblarError +import pytest + +from homeassistant.components.peblar.const import DOMAIN +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, async_fire_time_changed + +pytestmark = [ + pytest.mark.parametrize("init_integration", [Platform.SENSOR], indirect=True), + pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration"), +] + + +@pytest.mark.parametrize( + ("error", "log_message"), + [ + ( + PeblarConnectionError("Could not connect"), + ( + "An error occurred while communicating with the Peblar device: " + "Could not connect" + ), + ), + ( + PeblarError("Unknown 
error"), + ( + "An unknown error occurred while communicating " + "with the Peblar device: Unknown error" + ), + ), + ], +) +async def test_coordinator_error_handler( + hass: HomeAssistant, + mock_peblar: MagicMock, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, + error: Exception, + log_message: str, +) -> None: + """Test the coordinators.""" + entity_id = "sensor.peblar_ev_charger_power" + + # Ensure we are set up and the coordinator is working. + # Confirming this through a sensor entity, that is available. + assert (state := hass.states.get(entity_id)) + assert state.state != STATE_UNAVAILABLE + + # Mock an error in the coordinator. + mock_peblar.rest_api.return_value.meter.side_effect = error + freezer.tick(timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Ensure the sensor entity is now unavailable. + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE + + # Ensure the error is logged + assert log_message in caplog.text + + # Recover + mock_peblar.rest_api.return_value.meter.side_effect = None + freezer.tick(timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Ensure the sensor entity is now available. + assert (state := hass.states.get("sensor.peblar_ev_charger_power")) + assert state.state != STATE_UNAVAILABLE + + +async def test_coordinator_error_handler_authentication_error( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_peblar: MagicMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the coordinator error handler with an authentication error.""" + + # Ensure the sensor entity is now available. 
+ assert (state := hass.states.get("sensor.peblar_ev_charger_power")) + assert state.state != STATE_UNAVAILABLE + + # Mock an authentication in the coordinator + mock_peblar.rest_api.return_value.meter.side_effect = PeblarAuthenticationError( + "Authentication error" + ) + mock_peblar.login.side_effect = PeblarAuthenticationError("Authentication error") + freezer.tick(timedelta(seconds=15)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Ensure the sensor entity is now unavailable. + assert (state := hass.states.get("sensor.peblar_ev_charger_power")) + assert state.state == STATE_UNAVAILABLE + + # Ensure we have triggered a reauthentication flow + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow["step_id"] == "reauth_confirm" + assert flow["handler"] == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == mock_config_entry.entry_id From b2170ad73276e4844bcfa83f97d207e5c196a105 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 23 Dec 2024 11:23:26 +0100 Subject: [PATCH 672/677] Mark Peblar Rocksolid EV Chargers Platinum (#133823) --- homeassistant/components/peblar/manifest.json | 2 +- .../components/peblar/quality_scale.yaml | 18 +++++++++--------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/peblar/manifest.json b/homeassistant/components/peblar/manifest.json index 0e3a66dd256..ab5572e66d0 100644 --- a/homeassistant/components/peblar/manifest.json +++ b/homeassistant/components/peblar/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/peblar", "integration_type": "device", "iot_class": "local_polling", - "quality_scale": "bronze", + "quality_scale": "platinum", "requirements": ["peblar==0.3.0"], "zeroconf": [{ "type": "_http._tcp.local.", 
"name": "pblr-*" }] } diff --git a/homeassistant/components/peblar/quality_scale.yaml b/homeassistant/components/peblar/quality_scale.yaml index 9de0031373f..91f9bb7af55 100644 --- a/homeassistant/components/peblar/quality_scale.yaml +++ b/homeassistant/components/peblar/quality_scale.yaml @@ -28,7 +28,7 @@ rules: unique-config-entry: done # Silver - action-exceptions: todo + action-exceptions: done config-entry-unloading: done docs-configuration-parameters: status: exempt @@ -40,19 +40,19 @@ rules: log-when-unavailable: done parallel-updates: done reauthentication-flow: done - test-coverage: todo + test-coverage: done # Gold devices: done diagnostics: done discovery-update-info: done discovery: done - docs-data-update: todo - docs-examples: todo - docs-known-limitations: todo - docs-supported-devices: todo - docs-supported-functions: todo - docs-troubleshooting: todo - docs-use-cases: todo + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done dynamic-devices: status: exempt comment: | From e3cf5c47b220476301463b77f9a86540b9c25efd Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 23 Dec 2024 11:28:20 +0100 Subject: [PATCH 673/677] Add compatibility code for deprecated WaterHeaterEntityEntityDescription (#133351) --- homeassistant/components/water_heater/__init__.py | 8 ++++++++ tests/components/water_heater/test_init.py | 14 ++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/homeassistant/components/water_heater/__init__.py b/homeassistant/components/water_heater/__init__.py index cac0a365f74..60be340a253 100644 --- a/homeassistant/components/water_heater/__init__.py +++ b/homeassistant/components/water_heater/__init__.py @@ -25,6 +25,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import 
ServiceValidationError from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.deprecation import deprecated_class from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.temperature import display_temp as show_temp @@ -133,6 +134,13 @@ class WaterHeaterEntityDescription(EntityDescription, frozen_or_thawed=True): """A class that describes water heater entities.""" +@deprecated_class("WaterHeaterEntityDescription", breaks_in_ha_version="2026.1") +class WaterHeaterEntityEntityDescription( + WaterHeaterEntityDescription, frozen_or_thawed=True +): + """A (deprecated) class that describes water heater entities.""" + + CACHED_PROPERTIES_WITH_ATTR_ = { "temperature_unit", "current_operation", diff --git a/tests/components/water_heater/test_init.py b/tests/components/water_heater/test_init.py index 78efd94ef8e..09a0a711582 100644 --- a/tests/components/water_heater/test_init.py +++ b/tests/components/water_heater/test_init.py @@ -13,6 +13,8 @@ from homeassistant.components.water_heater import ( SERVICE_SET_OPERATION_MODE, SET_TEMPERATURE_SCHEMA, WaterHeaterEntity, + WaterHeaterEntityDescription, + WaterHeaterEntityEntityDescription, WaterHeaterEntityFeature, ) from homeassistant.config_entries import ConfigEntry @@ -204,3 +206,15 @@ async def test_operation_mode_validation( ) await hass.async_block_till_done() water_heater_entity.set_operation_mode.assert_has_calls([mock.call("eco")]) + + +@pytest.mark.parametrize( + ("class_name", "expected_log"), + [(WaterHeaterEntityDescription, False), (WaterHeaterEntityEntityDescription, True)], +) +async def test_deprecated_entity_description( + caplog: pytest.LogCaptureFixture, class_name: type, expected_log: bool +) -> None: + """Test deprecated WaterHeaterEntityEntityDescription logs warning.""" + class_name(key="test") + assert ("is a deprecated class" in caplog.text) is expected_log From 
939365887f7e845082793f79d865973f9b161b54 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Mon, 23 Dec 2024 11:35:37 +0100 Subject: [PATCH 674/677] Add coordinator to Twinkly (#133793) --- homeassistant/components/twinkly/__init__.py | 29 +-- .../components/twinkly/coordinator.py | 102 ++++++++ .../components/twinkly/diagnostics.py | 4 +- homeassistant/components/twinkly/light.py | 232 +++++------------- tests/components/twinkly/conftest.py | 8 +- .../twinkly/fixtures/get_saved_movies.json | 16 +- .../twinkly/snapshots/test_diagnostics.ambr | 4 +- .../twinkly/snapshots/test_light.ambr | 6 +- tests/components/twinkly/test_light.py | 28 +++ 9 files changed, 222 insertions(+), 207 deletions(-) create mode 100644 homeassistant/components/twinkly/coordinator.py diff --git a/homeassistant/components/twinkly/__init__.py b/homeassistant/components/twinkly/__init__.py index cd76a79e1d7..aaad731d264 100644 --- a/homeassistant/components/twinkly/__init__.py +++ b/homeassistant/components/twinkly/__init__.py @@ -1,8 +1,6 @@ """The twinkly component.""" -from dataclasses import dataclass import logging -from typing import Any from aiohttp import ClientError from ttls.client import Twinkly @@ -10,27 +8,18 @@ from ttls.client import Twinkly from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import ATTR_VERSION, DOMAIN +from .const import DOMAIN +from .coordinator import TwinklyCoordinator PLATFORMS = [Platform.LIGHT] _LOGGER = logging.getLogger(__name__) -@dataclass -class TwinklyData: - """Data for Twinkly integration.""" - - client: Twinkly - device_info: dict[str, Any] - sw_version: str | None - - -type TwinklyConfigEntry = ConfigEntry[TwinklyData] +type 
TwinklyConfigEntry = ConfigEntry[TwinklyCoordinator] async def async_setup_entry(hass: HomeAssistant, entry: TwinklyConfigEntry) -> bool: @@ -41,15 +30,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: TwinklyConfigEntry) -> b client = Twinkly(host, async_get_clientsession(hass)) - try: - device_info = await client.get_details() - software_version = await client.get_firmware_version() - except (TimeoutError, ClientError) as exception: - raise ConfigEntryNotReady from exception + coordinator = TwinklyCoordinator(hass, client) - entry.runtime_data = TwinklyData( - client, device_info, software_version.get(ATTR_VERSION) - ) + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/twinkly/coordinator.py b/homeassistant/components/twinkly/coordinator.py new file mode 100644 index 00000000000..8a5e3e087ae --- /dev/null +++ b/homeassistant/components/twinkly/coordinator.py @@ -0,0 +1,102 @@ +"""Coordinator for Twinkly.""" + +from dataclasses import dataclass +from datetime import timedelta +import logging +from typing import Any + +from aiohttp import ClientError +from awesomeversion import AwesomeVersion +from ttls.client import Twinkly, TwinklyError + +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DEV_NAME, DOMAIN, MIN_EFFECT_VERSION + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class TwinklyData: + """Class for Twinkly data.""" + + device_info: dict[str, Any] + brightness: int + is_on: bool + movies: dict[int, str] + current_movie: int | None + + +class TwinklyCoordinator(DataUpdateCoordinator[TwinklyData]): + """Class to manage fetching Twinkly data from API.""" + + software_version: str + supports_effects: bool + device_name: str + + def 
__init__(self, hass: HomeAssistant, client: Twinkly) -> None: + """Initialize global Twinkly data updater.""" + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=timedelta(seconds=30), + ) + self.client = client + + async def _async_setup(self) -> None: + """Set up the Twinkly data.""" + try: + software_version = await self.client.get_firmware_version() + self.device_name = (await self.client.get_details())[DEV_NAME] + except (TimeoutError, ClientError) as exception: + raise UpdateFailed from exception + self.software_version = software_version["version"] + self.supports_effects = AwesomeVersion(self.software_version) >= AwesomeVersion( + MIN_EFFECT_VERSION + ) + + async def _async_update_data(self) -> TwinklyData: + """Fetch data from Twinkly.""" + movies: list[dict[str, Any]] = [] + current_movie: dict[str, Any] = {} + try: + device_info = await self.client.get_details() + brightness = await self.client.get_brightness() + is_on = await self.client.is_on() + if self.supports_effects: + movies = (await self.client.get_saved_movies())["movies"] + except (TimeoutError, ClientError) as exception: + raise UpdateFailed from exception + if self.supports_effects: + try: + current_movie = await self.client.get_current_movie() + except (TwinklyError, TimeoutError, ClientError) as exception: + _LOGGER.debug("Error fetching current movie: %s", exception) + brightness = ( + int(brightness["value"]) if brightness["mode"] == "enabled" else 100 + ) + brightness = int(round(brightness * 2.55)) if is_on else 0 + if self.device_name != device_info[DEV_NAME]: + self._async_update_device_info(device_info[DEV_NAME]) + return TwinklyData( + device_info, + brightness, + is_on, + {movie["id"]: movie["name"] for movie in movies}, + current_movie.get("id"), + ) + + def _async_update_device_info(self, name: str) -> None: + """Update the device info.""" + device_registry = dr.async_get(self.hass) + device = device_registry.async_get_device( + identifiers={(DOMAIN, 
self.data.device_info["mac"])}, + ) + if device: + device_registry.async_update_device( + device.id, + name=name, + ) diff --git a/homeassistant/components/twinkly/diagnostics.py b/homeassistant/components/twinkly/diagnostics.py index 9ddc65cf255..d732ce14929 100644 --- a/homeassistant/components/twinkly/diagnostics.py +++ b/homeassistant/components/twinkly/diagnostics.py @@ -34,8 +34,8 @@ async def async_get_config_entry_diagnostics( return async_redact_data( { "entry": entry.as_dict(), - "device_info": entry.runtime_data.device_info, - ATTR_SW_VERSION: entry.runtime_data.sw_version, + "device_info": entry.runtime_data.data.device_info, + ATTR_SW_VERSION: entry.runtime_data.software_version, "attributes": attributes, }, TO_REDACT, diff --git a/homeassistant/components/twinkly/light.py b/homeassistant/components/twinkly/light.py index 7de07db3b30..1dfd6c1df30 100644 --- a/homeassistant/components/twinkly/light.py +++ b/homeassistant/components/twinkly/light.py @@ -5,9 +5,6 @@ from __future__ import annotations import logging from typing import Any -from aiohttp import ClientError -from awesomeversion import AwesomeVersion - from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_EFFECT, @@ -17,13 +14,12 @@ from homeassistant.components.light import ( LightEntity, LightEntityFeature, ) -from homeassistant.const import CONF_HOST, CONF_ID, CONF_MODEL, CONF_NAME from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import TwinklyConfigEntry +from . 
import TwinklyConfigEntry, TwinklyCoordinator from .const import ( DEV_LED_PROFILE, DEV_MODEL, @@ -31,7 +27,6 @@ from .const import ( DEV_PROFILE_RGB, DEV_PROFILE_RGBW, DOMAIN, - MIN_EFFECT_VERSION, ) _LOGGER = logging.getLogger(__name__) @@ -43,26 +38,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Setups an entity from a config entry (UI config flow).""" - entity = TwinklyLight(config_entry) + entity = TwinklyLight(config_entry.runtime_data) async_add_entities([entity], update_before_add=True) -class TwinklyLight(LightEntity): +class TwinklyLight(CoordinatorEntity[TwinklyCoordinator], LightEntity): """Implementation of the light for the Twinkly service.""" _attr_has_entity_name = True _attr_name = None _attr_translation_key = "light" - def __init__( - self, - entry: TwinklyConfigEntry, - ) -> None: + def __init__(self, coordinator: TwinklyCoordinator) -> None: """Initialize a TwinklyLight entity.""" - device_info = entry.runtime_data.device_info - self._attr_unique_id: str = device_info["mac"] - self._conf = entry + super().__init__(coordinator) + device_info = coordinator.data.device_info + self._attr_unique_id = mac = device_info["mac"] if device_info.get(DEV_LED_PROFILE) == DEV_PROFILE_RGBW: self._attr_supported_color_modes = {ColorMode.RGBW} @@ -75,66 +67,35 @@ class TwinklyLight(LightEntity): else: self._attr_supported_color_modes = {ColorMode.BRIGHTNESS} self._attr_color_mode = ColorMode.BRIGHTNESS - - # Those are saved in the config entry in order to have meaningful values even - # if the device is currently offline. - # They are expected to be updated using the device_info. 
- self._name = entry.data[CONF_NAME] or "Twinkly light" - self._model = entry.data[CONF_MODEL] - self._mac = device_info["mac"] - - self._client = entry.runtime_data.client - - # Set default state before any update - self._attr_is_on = False - self._attr_available = False - self._current_movie: dict[Any, Any] = {} - self._movies: list[Any] = [] - self._software_version = entry.runtime_data.sw_version - # We guess that most devices are "new" and support effects - self._attr_supported_features = LightEntityFeature.EFFECT - - @property - def device_info(self) -> DeviceInfo | None: - """Get device specific attributes.""" - return DeviceInfo( - identifiers={(DOMAIN, self._mac)}, - connections={(CONNECTION_NETWORK_MAC, self._mac)}, + self.client = coordinator.client + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, mac)}, + connections={(CONNECTION_NETWORK_MAC, mac)}, manufacturer="LEDWORKS", - model=self._model, - name=self._name, - sw_version=self._software_version, + model=device_info[DEV_MODEL], + name=device_info[DEV_NAME], + sw_version=coordinator.software_version, ) + if coordinator.supports_effects: + self._attr_supported_features = LightEntityFeature.EFFECT + self._update_attr() @property def effect(self) -> str | None: """Return the current effect.""" - if "name" in self._current_movie: - return f"{self._current_movie['id']} {self._current_movie['name']}" + if (current_movie_id := self.coordinator.data.current_movie) is not None: + return ( + f"{current_movie_id} {self.coordinator.data.movies[current_movie_id]}" + ) return None @property def effect_list(self) -> list[str]: """Return the list of saved effects.""" - return [f"{movie['id']} {movie['name']}" for movie in self._movies] - - async def async_added_to_hass(self) -> None: - """Device is added to hass.""" - if self._software_version: - if AwesomeVersion(self._software_version) < AwesomeVersion( - MIN_EFFECT_VERSION - ): - self._attr_supported_features = ( - self.supported_features & 
~LightEntityFeature.EFFECT - ) - device_registry = dr.async_get(self.hass) - device_entry = device_registry.async_get_device( - {(DOMAIN, self._attr_unique_id)}, set() - ) - if device_entry: - device_registry.async_update_device( - device_entry.id, sw_version=self._software_version - ) + return [ + f"{identifier} {name}" + for identifier, name in self.coordinator.data.movies.items() + ] async def async_turn_on(self, **kwargs: Any) -> None: """Turn device on.""" @@ -144,29 +105,29 @@ class TwinklyLight(LightEntity): # If brightness is 0, the twinkly will only "disable" the brightness, # which means that it will be 100%. if brightness == 0: - await self._client.turn_off() + await self.client.turn_off() return - await self._client.set_brightness(brightness) + await self.client.set_brightness(brightness) if ( ATTR_RGBW_COLOR in kwargs and kwargs[ATTR_RGBW_COLOR] != self._attr_rgbw_color ): - await self._client.interview() + await self.client.interview() if LightEntityFeature.EFFECT & self.supported_features: # Static color only supports rgb - await self._client.set_static_colour( + await self.client.set_static_colour( ( kwargs[ATTR_RGBW_COLOR][0], kwargs[ATTR_RGBW_COLOR][1], kwargs[ATTR_RGBW_COLOR][2], ) ) - await self._client.set_mode("color") - self._client.default_mode = "color" + await self.client.set_mode("color") + self.client.default_mode = "color" else: - await self._client.set_cycle_colours( + await self.client.set_cycle_colours( ( kwargs[ATTR_RGBW_COLOR][3], kwargs[ATTR_RGBW_COLOR][0], @@ -174,20 +135,20 @@ class TwinklyLight(LightEntity): kwargs[ATTR_RGBW_COLOR][2], ) ) - await self._client.set_mode("movie") - self._client.default_mode = "movie" + await self.client.set_mode("movie") + self.client.default_mode = "movie" self._attr_rgbw_color = kwargs[ATTR_RGBW_COLOR] if ATTR_RGB_COLOR in kwargs and kwargs[ATTR_RGB_COLOR] != self._attr_rgb_color: - await self._client.interview() + await self.client.interview() if LightEntityFeature.EFFECT & 
self.supported_features: - await self._client.set_static_colour(kwargs[ATTR_RGB_COLOR]) - await self._client.set_mode("color") - self._client.default_mode = "color" + await self.client.set_static_colour(kwargs[ATTR_RGB_COLOR]) + await self.client.set_mode("color") + self.client.default_mode = "color" else: - await self._client.set_cycle_colours(kwargs[ATTR_RGB_COLOR]) - await self._client.set_mode("movie") - self._client.default_mode = "movie" + await self.client.set_cycle_colours(kwargs[ATTR_RGB_COLOR]) + await self.client.set_mode("movie") + self.client.default_mode = "movie" self._attr_rgb_color = kwargs[ATTR_RGB_COLOR] @@ -196,100 +157,29 @@ class TwinklyLight(LightEntity): and LightEntityFeature.EFFECT & self.supported_features ): movie_id = kwargs[ATTR_EFFECT].split(" ")[0] - if "id" not in self._current_movie or int(movie_id) != int( - self._current_movie["id"] + if ( + self.coordinator.data.current_movie is None + or int(movie_id) != self.coordinator.data.current_movie ): - await self._client.interview() - await self._client.set_current_movie(int(movie_id)) - await self._client.set_mode("movie") - self._client.default_mode = "movie" + await self.client.interview() + await self.client.set_current_movie(int(movie_id)) + await self.client.set_mode("movie") + self.client.default_mode = "movie" if not self._attr_is_on: - await self._client.turn_on() + await self.client.turn_on() + await self.coordinator.async_refresh() async def async_turn_off(self, **kwargs: Any) -> None: """Turn device off.""" - await self._client.turn_off() + await self.client.turn_off() + await self.coordinator.async_refresh() - async def async_update(self) -> None: - """Asynchronously updates the device properties.""" - _LOGGER.debug("Updating '%s'", self._client.host) + def _update_attr(self) -> None: + """Update the entity attributes.""" + self._attr_is_on = self.coordinator.data.is_on + self._attr_brightness = self.coordinator.data.brightness - try: - self._attr_is_on = await 
self._client.is_on() - - brightness = await self._client.get_brightness() - brightness_value = ( - int(brightness["value"]) if brightness["mode"] == "enabled" else 100 - ) - - self._attr_brightness = ( - int(round(brightness_value * 2.55)) if self._attr_is_on else 0 - ) - - device_info = await self._client.get_details() - - if ( - DEV_NAME in device_info - and DEV_MODEL in device_info - and ( - device_info[DEV_NAME] != self._name - or device_info[DEV_MODEL] != self._model - ) - ): - self._name = device_info[DEV_NAME] - self._model = device_info[DEV_MODEL] - - # If the name has changed, persist it in conf entry, - # so we will be able to restore this new name if hass - # is started while the LED string is offline. - self.hass.config_entries.async_update_entry( - self._conf, - data={ - CONF_HOST: self._client.host, # this cannot change - CONF_ID: self._attr_unique_id, # this cannot change - CONF_NAME: self._name, - CONF_MODEL: self._model, - }, - ) - - device_registry = dr.async_get(self.hass) - device_entry = device_registry.async_get_device( - {(DOMAIN, self._attr_unique_id)} - ) - if device_entry: - device_registry.async_update_device( - device_entry.id, name=self._name, model=self._model - ) - - if LightEntityFeature.EFFECT & self.supported_features: - await self.async_update_movies() - await self.async_update_current_movie() - - if not self._attr_available: - _LOGGER.warning("Twinkly '%s' is now available", self._client.host) - - # We don't use the echo API to track the availability since - # we already have to pull the device to get its state. 
- self._attr_available = True - except (TimeoutError, ClientError): - # We log this as "info" as it's pretty common that the Christmas - # light are not reachable in July - if self._attr_available: - _LOGGER.warning( - "Twinkly '%s' is not reachable (client error)", self._client.host - ) - self._attr_available = False - - async def async_update_movies(self) -> None: - """Update the list of movies (effects).""" - movies = await self._client.get_saved_movies() - _LOGGER.debug("Movies: %s", movies) - if movies and "movies" in movies: - self._movies = movies["movies"] - - async def async_update_current_movie(self) -> None: - """Update the current active movie.""" - current_movie = await self._client.get_current_movie() - _LOGGER.debug("Current movie: %s", current_movie) - if current_movie and "id" in current_movie: - self._current_movie = current_movie + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._update_attr() + super()._handle_coordinator_update() diff --git a/tests/components/twinkly/conftest.py b/tests/components/twinkly/conftest.py index 6b32c786c99..c66be97a257 100644 --- a/tests/components/twinkly/conftest.py +++ b/tests/components/twinkly/conftest.py @@ -10,11 +10,7 @@ from homeassistant.const import CONF_HOST, CONF_ID, CONF_MODEL, CONF_NAME from .const import TEST_MAC, TEST_MODEL, TEST_NAME -from tests.common import ( - MockConfigEntry, - load_json_array_fixture, - load_json_object_fixture, -) +from tests.common import MockConfigEntry, load_json_object_fixture @pytest.fixture @@ -55,7 +51,7 @@ def mock_twinkly_client() -> Generator[AsyncMock]: client.get_firmware_version.return_value = load_json_object_fixture( "get_firmware_version.json", DOMAIN ) - client.get_saved_movies.return_value = load_json_array_fixture( + client.get_saved_movies.return_value = load_json_object_fixture( "get_saved_movies.json", DOMAIN ) client.get_current_movie.return_value = load_json_object_fixture( diff --git 
a/tests/components/twinkly/fixtures/get_saved_movies.json b/tests/components/twinkly/fixtures/get_saved_movies.json index 0ee21f3254d..0fa7696d3df 100644 --- a/tests/components/twinkly/fixtures/get_saved_movies.json +++ b/tests/components/twinkly/fixtures/get_saved_movies.json @@ -1,4 +1,12 @@ -[ - { "id": 1, "name": "Rainbow" }, - { "id": 2, "name": "Flare" } -] +{ + "movies": [ + { + "id": 1, + "name": "Rainbow" + }, + { + "id": 2, + "name": "Flare" + } + ] +} diff --git a/tests/components/twinkly/snapshots/test_diagnostics.ambr b/tests/components/twinkly/snapshots/test_diagnostics.ambr index e9c89754ab7..814dc7dfc1f 100644 --- a/tests/components/twinkly/snapshots/test_diagnostics.ambr +++ b/tests/components/twinkly/snapshots/test_diagnostics.ambr @@ -4,8 +4,10 @@ 'attributes': dict({ 'brightness': 26, 'color_mode': 'rgb', - 'effect': None, + 'effect': '1 Rainbow', 'effect_list': list([ + '1 Rainbow', + '2 Flare', ]), 'friendly_name': 'Tree 1', 'hs_color': list([ diff --git a/tests/components/twinkly/snapshots/test_light.ambr b/tests/components/twinkly/snapshots/test_light.ambr index ac4e275a0a1..a97c3f941ff 100644 --- a/tests/components/twinkly/snapshots/test_light.ambr +++ b/tests/components/twinkly/snapshots/test_light.ambr @@ -6,6 +6,8 @@ 'area_id': None, 'capabilities': dict({ 'effect_list': list([ + '1 Rainbow', + '2 Flare', ]), 'supported_color_modes': list([ , @@ -43,8 +45,10 @@ 'attributes': ReadOnlyDict({ 'brightness': 26, 'color_mode': , - 'effect': None, + 'effect': '1 Rainbow', 'effect_list': list([ + '1 Rainbow', + '2 Flare', ]), 'friendly_name': 'Tree 1', 'hs_color': tuple( diff --git a/tests/components/twinkly/test_light.py b/tests/components/twinkly/test_light.py index c008ab51ef7..acf30764bab 100644 --- a/tests/components/twinkly/test_light.py +++ b/tests/components/twinkly/test_light.py @@ -9,6 +9,7 @@ from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion 
+from ttls.client import TwinklyError from homeassistant.components.light import ( ATTR_BRIGHTNESS, @@ -25,6 +26,7 @@ from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, + STATE_UNAVAILABLE, Platform, ) from homeassistant.core import HomeAssistant @@ -278,6 +280,28 @@ async def test_turn_off( mock_twinkly_client.turn_off.assert_called_once_with() +async def test_no_current_movie( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_twinkly_client: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test handling of missing current movie data.""" + + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("light.tree_1").attributes[ATTR_EFFECT] == "1 Rainbow" + + mock_twinkly_client.get_current_movie.side_effect = TwinklyError + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("light.tree_1").state != STATE_UNAVAILABLE + assert hass.states.get("light.tree_1").attributes[ATTR_EFFECT] is None + + async def test_update_name( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -294,6 +318,10 @@ async def test_update_name( await setup_integration(hass, mock_config_entry) + dev_entry = device_registry.async_get_device({(DOMAIN, TEST_MAC)}) + + assert dev_entry.name == "Tree 1" + mock_twinkly_client.get_details.return_value["device_name"] = "new_device_name" freezer.tick(timedelta(seconds=30)) From cf3d4eb26af9429216e972d37d829845fca6a6ee Mon Sep 17 00:00:00 2001 From: Omni Flux Date: Mon, 23 Dec 2024 05:35:59 -0500 Subject: [PATCH 675/677] Respect ESPHome ClimateTrait supports_current_temperature (#132149) --- homeassistant/components/esphome/climate.py | 2 ++ tests/components/esphome/test_climate.py | 33 +++++++++++++++++++++ 2 files changed, 35 insertions(+) diff --git a/homeassistant/components/esphome/climate.py b/homeassistant/components/esphome/climate.py index 8089fc4712a..478ce9bae2c 100644 --- 
a/homeassistant/components/esphome/climate.py +++ b/homeassistant/components/esphome/climate.py @@ -230,6 +230,8 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti @esphome_float_state_property def current_temperature(self) -> float | None: """Return the current temperature.""" + if not self._static_info.supports_current_temperature: + return None return self._state.current_temperature @property diff --git a/tests/components/esphome/test_climate.py b/tests/components/esphome/test_climate.py index 189b86fc5fd..2a5013444dd 100644 --- a/tests/components/esphome/test_climate.py +++ b/tests/components/esphome/test_climate.py @@ -484,3 +484,36 @@ async def test_climate_entity_attributes( assert state is not None assert state.state == HVACMode.COOL assert state.attributes == snapshot(name="climate-entity-attributes") + + +async def test_climate_entity_attribute_current_temperature_unsupported( + hass: HomeAssistant, + mock_client: APIClient, + mock_generic_device_entry, +) -> None: + """Test a climate entity with current temperature unsupported.""" + entity_info = [ + ClimateInfo( + object_id="myclimate", + key=1, + name="my climate", + unique_id="my_climate", + supports_current_temperature=False, + ) + ] + states = [ + ClimateState( + key=1, + current_temperature=30, + ) + ] + user_service = [] + await mock_generic_device_entry( + mock_client=mock_client, + entity_info=entity_info, + user_service=user_service, + states=states, + ) + state = hass.states.get("climate.test_myclimate") + assert state is not None + assert state.attributes[ATTR_CURRENT_TEMPERATURE] is None From c5fe25a001a4ac768ac7a2f8e430ed3e635d8d45 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Mon, 23 Dec 2024 12:05:29 +0100 Subject: [PATCH 676/677] Bump deebot-client to 10.0.1 (#133634) --- homeassistant/components/ecovacs/manifest.json | 2 +- homeassistant/components/ecovacs/number.py | 2 +- homeassistant/components/ecovacs/select.py | 4 ++-- 
homeassistant/components/ecovacs/switch.py | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 271f9ee8dcd..3a2d4e7704b 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==9.4.0"] + "requirements": ["py-sucks==0.9.10", "deebot-client==10.0.1"] } diff --git a/homeassistant/components/ecovacs/number.py b/homeassistant/components/ecovacs/number.py index 2b9bdc1a425..adf282560a9 100644 --- a/homeassistant/components/ecovacs/number.py +++ b/homeassistant/components/ecovacs/number.py @@ -95,7 +95,7 @@ async def async_setup_entry( class EcovacsNumberEntity( - EcovacsDescriptionEntity[CapabilitySet[EventT, int]], + EcovacsDescriptionEntity[CapabilitySet[EventT, [int]]], NumberEntity, ): """Ecovacs number entity.""" diff --git a/homeassistant/components/ecovacs/select.py b/homeassistant/components/ecovacs/select.py index c8b01a0f83a..3c3852f05ec 100644 --- a/homeassistant/components/ecovacs/select.py +++ b/homeassistant/components/ecovacs/select.py @@ -66,7 +66,7 @@ async def async_setup_entry( class EcovacsSelectEntity( - EcovacsDescriptionEntity[CapabilitySetTypes[EventT, str]], + EcovacsDescriptionEntity[CapabilitySetTypes[EventT, [str], str]], SelectEntity, ): """Ecovacs select entity.""" @@ -77,7 +77,7 @@ class EcovacsSelectEntity( def __init__( self, device: Device, - capability: CapabilitySetTypes[EventT, str], + capability: CapabilitySetTypes[EventT, [str], str], entity_description: EcovacsSelectEntityDescription, **kwargs: Any, ) -> None: diff --git a/homeassistant/components/ecovacs/switch.py 
b/homeassistant/components/ecovacs/switch.py index 872981b5c28..288d092d391 100644 --- a/homeassistant/components/ecovacs/switch.py +++ b/homeassistant/components/ecovacs/switch.py @@ -131,7 +131,7 @@ class EcovacsSwitchEntity( await super().async_added_to_hass() async def on_event(event: EnableEvent) -> None: - self._attr_is_on = event.enable + self._attr_is_on = event.enabled self.async_write_ha_state() self._subscribe(self._capability.event, on_event) diff --git a/requirements_all.txt b/requirements_all.txt index 661571b2cb9..965b5673961 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -738,7 +738,7 @@ debugpy==1.8.11 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==9.4.0 +deebot-client==10.0.1 # homeassistant.components.ihc # homeassistant.components.namecheapdns diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 9ff8ca7c990..b59be622158 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -628,7 +628,7 @@ dbus-fast==2.24.3 debugpy==1.8.11 # homeassistant.components.ecovacs -deebot-client==9.4.0 +deebot-client==10.0.1 # homeassistant.components.ihc # homeassistant.components.namecheapdns From 386a7223931e5090c31a69c628163a8d9abf937d Mon Sep 17 00:00:00 2001 From: Simon <80467011+sorgfresser@users.noreply.github.com> Date: Mon, 23 Dec 2024 11:05:31 +0000 Subject: [PATCH 677/677] ElevenLabs invalid api key config flow testing (#133822) --- .../components/elevenlabs/quality_scale.yaml | 6 +----- tests/components/elevenlabs/conftest.py | 15 +++++++++----- .../components/elevenlabs/test_config_flow.py | 20 ++++++++++++++++++- 3 files changed, 30 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/elevenlabs/quality_scale.yaml b/homeassistant/components/elevenlabs/quality_scale.yaml index 49f0d7518f5..ecd2092492c 100644 --- a/homeassistant/components/elevenlabs/quality_scale.yaml +++ b/homeassistant/components/elevenlabs/quality_scale.yaml @@ -7,11 +7,7 @@ rules: 
appropriate-polling: done brands: done common-modules: done - config-flow-test-coverage: - status: todo - comment: > - We should have every test end in either ABORT or CREATE_ENTRY. - test_invalid_api_key should assert the kind of error that is raised. + config-flow-test-coverage: done config-flow: done dependency-transparency: done docs-actions: done diff --git a/tests/components/elevenlabs/conftest.py b/tests/components/elevenlabs/conftest.py index c9ed49ba13c..d410f8bccdd 100644 --- a/tests/components/elevenlabs/conftest.py +++ b/tests/components/elevenlabs/conftest.py @@ -24,14 +24,19 @@ def mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup_entry -@pytest.fixture -def mock_async_client() -> Generator[AsyncMock]: - """Override async ElevenLabs client.""" +def _client_mock(): client_mock = AsyncMock() client_mock.voices.get_all.return_value = GetVoicesResponse(voices=MOCK_VOICES) client_mock.models.get_all.return_value = MOCK_MODELS + return client_mock + + +@pytest.fixture +def mock_async_client() -> Generator[AsyncMock]: + """Override async ElevenLabs client.""" with patch( - "elevenlabs.AsyncElevenLabs", return_value=client_mock + "homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs", + return_value=_client_mock(), ) as mock_async_client: yield mock_async_client @@ -41,7 +46,7 @@ def mock_async_client_fail() -> Generator[AsyncMock]: """Override async ElevenLabs client.""" with patch( "homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs", - return_value=AsyncMock(), + return_value=_client_mock(), ) as mock_async_client: mock_async_client.side_effect = ApiError yield mock_async_client diff --git a/tests/components/elevenlabs/test_config_flow.py b/tests/components/elevenlabs/test_config_flow.py index 971fa75939a..95e7ab5214e 100644 --- a/tests/components/elevenlabs/test_config_flow.py +++ b/tests/components/elevenlabs/test_config_flow.py @@ -73,10 +73,28 @@ async def test_invalid_api_key( }, ) assert result["type"] is 
FlowResultType.FORM - assert result["errors"] + assert result["errors"] == {"base": "invalid_api_key"} mock_setup_entry.assert_not_called() + # Reset the side effect + mock_async_client_fail.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_API_KEY: "api_key", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "ElevenLabs" + assert result["data"] == { + "api_key": "api_key", + } + assert result["options"] == {CONF_MODEL: DEFAULT_MODEL, CONF_VOICE: "voice1"} + + mock_setup_entry.assert_called_once() + async def test_options_flow_init( hass: HomeAssistant,